/*
 *  Licensed to the Apache Software Foundation (ASF) under one
 *  or more contributor license agreements.  See the NOTICE file
 *  distributed with this work for additional information
 *  regarding copyright ownership.  The ASF licenses this file
 *  to you under the Apache License, Version 2.0 (the
 *  "License"); you may not use this file except in compliance
 *  with the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing,
 *  software distributed under the License is distributed on an
 *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 *  KIND, either express or implied.  See the License for the
 *  specific language governing permissions and limitations
 *  under the License.
 *
 */
package org.apache.directory.shared.ldap.schema.ldif.extractor.impl;


import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InvalidObjectException;
import java.net.URL;
import java.util.Enumeration;
import java.util.Map;
import java.util.Stack;
import java.util.UUID;
import java.util.Map.Entry;
import java.util.regex.Pattern;

import org.apache.directory.shared.i18n.I18n;
import org.apache.directory.shared.ldap.constants.SchemaConstants;
import org.apache.directory.shared.ldap.exception.LdapException;
import org.apache.directory.shared.ldap.ldif.LdapLdifException;
import org.apache.directory.shared.ldap.ldif.LdifEntry;
import org.apache.directory.shared.ldap.ldif.LdifReader;
import org.apache.directory.shared.ldap.schema.ldif.extractor.SchemaLdifExtractor;
import org.apache.directory.shared.ldap.schema.ldif.extractor.UniqueResourceException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;


/**
 * Extracts LDIF files for the schema repository onto a destination directory.
 *
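 * <p>
 * A minimal usage sketch; the output directory shown is only an example:
 * <pre>
 * File outputDir = new File( "target/schema-output" );
 * DefaultSchemaLdifExtractor extractor = new DefaultSchemaLdifExtractor( outputDir );
 * 
 * if ( ! extractor.isExtracted() )
 * {
 *     // may throw IOException on extraction problems
 *     extractor.extractOrCopy();
 * }
 * </pre>
 *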
 * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
 * @version $Rev: 664295 $
 */
public class DefaultSchemaLdifExtractor implements SchemaLdifExtractor
{
    private static final String BASE_PATH = "";

    private static final String SCHEMA_SUBDIR = "schema";

    private static final Logger LOG = LoggerFactory.getLogger( DefaultSchemaLdifExtractor.class );

    private boolean extracted;

    private File outputDirectory;

    private File schemaDirectory;


    /**
     * Creates an extractor which deposits files into the specified output
     * directory.
     *
     * @param outputDirectory the directory where the schema root is extracted
     */
    public DefaultSchemaLdifExtractor( File outputDirectory )
    {
        LOG.debug( "BASE_PATH set to {}, outputDirectory set to {}", BASE_PATH, outputDirectory );
        this.outputDirectory = outputDirectory;
        this.schemaDirectory = new File( outputDirectory, SCHEMA_SUBDIR );

        if ( ! outputDirectory.exists() )
        {
            LOG.debug( "Creating output directory: {}", outputDirectory );
            if ( ! outputDirectory.mkdir() )
            {
                LOG.error( "Failed to create outputDirectory: {}", outputDirectory );
            }
        }
        else
        {
            LOG.debug( "Output directory exists: no need to create." );
        }

        if ( ! schemaDirectory.exists() )
        {
            LOG.info( "Schema directory '{}' does NOT exist: extracted state set to false.", schemaDirectory );
            extracted = false;
        }
        else
        {
            LOG.info( "Schema directory '{}' does exist: extracted state set to true.", schemaDirectory );
            extracted = true;
        }
    }


    /**
     * Tells whether the schema folder has already been extracted.
     *
     * @return true if the schema folder has already been extracted
     */
    public boolean isExtracted()
    {
        return extracted;
    }


    /**
     * Extracts the LDIF files from a Jar file or copies exploded LDIF resources.
     *
     * @param overwrite if true, overwrites an already extracted schema structure
     * @throws IOException if the schema has already been extracted and overwrite is false, or on other IO errors
     */
    public void extractOrCopy( boolean overwrite ) throws IOException
    {
        if ( ! outputDirectory.exists() )
        {
            outputDirectory.mkdir();
        }

        File schemaDirectory = new File( outputDirectory, SCHEMA_SUBDIR );

        if ( ! schemaDirectory.exists() )
        {
            schemaDirectory.mkdir();
        }
        else if ( ! overwrite )
        {
            throw new IOException( I18n.err( I18n.ERR_08001, schemaDirectory.getAbsolutePath() ) );
        }

        Pattern pattern = Pattern.compile( ".*schema/ou=schema.*\\.ldif" );
        Map<String,Boolean> list = ResourceMap.getResources( pattern );

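        // Each matching resource is mapped to a flag telling whether it was found
        // inside a jar (true) or as an exploded file on disk (false): jar resources
        // are extracted, exploded resources are simply copied.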
        for ( Entry<String,Boolean> entry : list.entrySet() )
        {
            if ( entry.getValue() )
            {
                extractFromJar( entry.getKey() );
            }
            else
            {
                File resource = new File( entry.getKey() );
                copyFile( resource, getDestinationFile( resource ) );
            }
        }
    }


    /**
     * Extracts the LDIF files from a Jar file or copies exploded LDIF
     * resources without overwriting the resources if the schema has
     * already been extracted.
     *
     * @throws IOException if the schema has already been extracted, or on other IO errors
     */
    public void extractOrCopy() throws IOException
    {
        extractOrCopy( false );
    }


    /**
     * Copies a schema LDIF resource from the source file to the destination
     * file, adding an entryUUID attribute to the entry if it does not
     * already have one.
     *
     * @param source the source file to copy
     * @param destination the destination to copy the source to
     * @throws IOException if there are IO errors or the source does not exist
     */
    private void copyFile( File source, File destination ) throws IOException
    {
        LOG.debug( "copyFile(): source = {}, destination = {}", source, destination );

        if ( ! destination.getParentFile().exists() )
        {
            destination.getParentFile().mkdirs();
        }

        if ( ! source.exists() )
        {
            throw new FileNotFoundException( I18n.err( I18n.ERR_08002, source.getAbsolutePath() ) );
        }

        FileWriter out = new FileWriter( destination );

        try
        {
            LdifReader ldifReader = new LdifReader( source );
            boolean first = true;
            LdifEntry ldifEntry = null;

            while ( ldifReader.hasNext() )
            {
                if ( first )
                {
                    ldifEntry = ldifReader.next();

                    if ( ldifEntry.get( SchemaConstants.ENTRY_UUID_AT ) == null )
                    {
                        // No UUID, let's create one
                        UUID entryUuid = UUID.randomUUID();
                        ldifEntry.addAttribute( SchemaConstants.ENTRY_UUID_AT, entryUuid.toString() );
                    }

                    first = false;
                }
                else
                {
                    // throw an exception : we should not have more than one entry per schema ldif file
                    String msg = I18n.err( I18n.ERR_08003, source );
                    LOG.error( msg );
                    throw new InvalidObjectException( msg );
                }
            }

            ldifReader.close();

            // Add the version at the first line, to avoid a warning
            String ldifString = "version: 1\n" + ldifEntry.toString();

            out.write( ldifString );
            out.flush();
        }
        catch ( LdapLdifException ne )
        {
            // The source LDIF file could not be parsed : rethrow as an IOException
            String msg = I18n.err( I18n.ERR_08004, source, ne.getLocalizedMessage() );
            LOG.error( msg );
            throw new InvalidObjectException( msg );
        }
        catch ( LdapException ne )
        {
            // The entry could not be processed : rethrow as an IOException
            String msg = I18n.err( I18n.ERR_08004, source, ne.getLocalizedMessage() );
            LOG.error( msg );
            throw new InvalidObjectException( msg );
        }
        finally
        {
            out.close();
        }
    }


    /**
     * Assembles the destination file by appending file components previously
     * pushed on the fileComponentStack argument.
     *
     * @param fileComponentStack stack containing pushed file components
     * @return the assembled destination file
     */
    private File assembleDestinationFile( Stack<String> fileComponentStack )
    {
        File destinationFile = outputDirectory.getAbsoluteFile();

        while ( ! fileComponentStack.isEmpty() )
        {
            destinationFile = new File( destinationFile, fileComponentStack.pop() );
        }

        return destinationFile;
    }


    /**
     * Calculates the destination file for a schema LDIF resource.
     *
     * @param resource the source file
     * @return the destination file under the output directory
     */
    private File getDestinationFile( File resource )
    {
        File parent = resource.getParentFile();
        Stack<String> fileComponentStack = new Stack<String>();
        fileComponentStack.push( resource.getName() );

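        // Walk up the source path, pushing each directory name onto the stack,
        // until the "schema" ancestor is found; the destination is then rebuilt
        // under the output directory from the pushed components.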
        while ( parent != null )
        {
            if ( parent.getName().equals( "schema" ) )
            {
                // All LDIF files besides the schema.ldif are under the
                // schema/schema base path, so we need to add one more
                // schema component for every LDIF file except schema.ldif itself.
                fileComponentStack.push( "schema" );

                return assembleDestinationFile( fileComponentStack );
            }

            fileComponentStack.push( parent.getName() );

            if ( parent.equals( parent.getParentFile() )
                    || parent.getParentFile() == null )
            {
                throw new IllegalStateException( I18n.err( I18n.ERR_08005 ) );
            }

            parent = parent.getParentFile();
        }

        /*
         * The loop above either returns the assembled destination file or
         * throws an IllegalStateException, so this point can only be reached
         * when the resource has no parent path at all and therefore no
         * "schema" ancestor was found.
         */
        throw new IllegalStateException( I18n.err( I18n.ERR_08006 ) );
    }


    /**
     * Gets the named resource from within a jar off the base path.  If another jar
     * with such a resource exists then an error will result since the resource
     * is not unique across all the jars.
     *
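     * A usage sketch (the resource path shown is only an example):
     * <pre>
     * InputStream in = DefaultSchemaLdifExtractor.getUniqueResourceAsStream(
     *     "schema/ou=schema.ldif", "schema root LDIF" );
     * </pre>
     *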
     * @param resourceName the file name of the resource to load
     * @param resourceDescription human description of the resource
     * @return the InputStream to read the contents of the resource
     * @throws IOException if there are problems reading or finding a unique copy of the resource
     */
    public static InputStream getUniqueResourceAsStream( String resourceName, String resourceDescription ) throws IOException
    {
        resourceName = BASE_PATH + resourceName;
        URL result = getUniqueResource( resourceName, resourceDescription );
        return result.openStream();
    }


    /**
     * Gets a unique resource from a Jar file.
     *
     * @param resourceName the name of the resource
     * @param resourceDescription the description of the resource
     * @return the URL to the resource in the Jar file
     * @throws IOException if there is an IO error
     */
    public static URL getUniqueResource( String resourceName, String resourceDescription )
            throws IOException
    {
        Enumeration<URL> resources = DefaultSchemaLdifExtractor.class.getClassLoader().getResources( resourceName );

        if ( ! resources.hasMoreElements() )
        {
            throw new UniqueResourceException( resourceName, resourceDescription );
        }

        URL result = resources.nextElement();

        if ( resources.hasMoreElements() )
        {
            throw new UniqueResourceException( resourceName, result, resources, resourceDescription );
        }

        return result;
    }


    /**
     * Extracts the LDIF schema resource from a Jar.
     *
     * @param resource the LDIF schema resource
     * @throws IOException if there are IO errors
     */
    private void extractFromJar( String resource ) throws IOException
    {
        byte[] buf = new byte[512];
        InputStream in = DefaultSchemaLdifExtractor.getUniqueResourceAsStream( resource,
            "LDIF file in schema repository" );

        try
        {
            File destination = new File( outputDirectory, resource );

            /*
             * Do not overwrite an LDIF file if it has already been extracted.
             */
            if ( destination.exists() )
            {
                return;
            }

            if ( ! destination.getParentFile().exists() )
            {
                destination.getParentFile().mkdirs();
            }

            FileOutputStream out = new FileOutputStream( destination );
            try
            {
                // InputStream.available() is not a reliable end-of-stream test,
                // so read until read() reports end of stream instead.
                int readCount;

                while ( ( readCount = in.read( buf ) ) != -1 )
                {
                    out.write( buf, 0, readCount );
                }

                out.flush();
            }
            finally
            {
                out.close();
            }
        }
        finally
        {
            in.close();
        }
    }
}