ladle 0.2.0-java → 1.0.0-java

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (63)
  1. checksums.yaml +7 -0
  2. data/CHANGELOG.md +18 -0
  3. data/CUSTOM-SCHEMA.md +20 -45
  4. data/README.md +12 -8
  5. data/lib/ladle/apacheds/annotations-1.0.0.jar +0 -0
  6. data/lib/ladle/apacheds/apacheds-all-2.0.0-M16.jar +0 -0
  7. data/lib/ladle/apacheds/apacheds-jdbm2-2.0.0-M2.jar +0 -0
  8. data/lib/ladle/apacheds/bcel-5.1.jar +0 -0
  9. data/lib/ladle/apacheds/commons-cli-1.2.jar +0 -0
  10. data/lib/ladle/apacheds/coreplugin-1.0.0.jar +0 -0
  11. data/lib/ladle/apacheds/findbugs-1.0.0.jar +0 -0
  12. data/lib/ladle/apacheds/findbugs-ant-1.0.0.jar +0 -0
  13. data/lib/ladle/apacheds/findbugsGUI-1.0.0.jar +0 -0
  14. data/lib/ladle/apacheds/log4j-1.2.17.jar +0 -0
  15. data/lib/ladle/apacheds/regexp-1.2.jar +0 -0
  16. data/lib/ladle/apacheds/slf4j-api-1.7.7.jar +0 -0
  17. data/lib/ladle/apacheds/slf4j-log4j12-1.7.7.jar +0 -0
  18. data/lib/ladle/java/log4j-quiet.properties +8 -0
  19. data/lib/ladle/java/net/detailedbalance/ladle/LadleFatalException.class +0 -0
  20. data/lib/ladle/java/net/detailedbalance/ladle/Main$1.class +0 -0
  21. data/lib/ladle/java/net/detailedbalance/ladle/Main.class +0 -0
  22. data/lib/ladle/java/net/detailedbalance/ladle/Main.java +9 -9
  23. data/lib/ladle/java/net/detailedbalance/ladle/Server.class +0 -0
  24. data/lib/ladle/java/net/detailedbalance/ladle/Server.java +214 -81
  25. data/lib/ladle/jruby_process.rb +5 -1
  26. data/lib/ladle/server.rb +23 -7
  27. data/lib/ladle/version.rb +1 -1
  28. data/spec/ladle/animals-custom-schema.ldif +26 -0
  29. data/spec/ladle/animals-custom.ldif +0 -2
  30. data/spec/ladle/server_spec.rb +6 -11
  31. metadata +161 -178
  32. data/lib/ladle/Ladle.iml +0 -13
  33. data/lib/ladle/apacheds/antlr-2.7.6.jar +0 -0
  34. data/lib/ladle/apacheds/apacheds-core-1.0.2.jar +0 -0
  35. data/lib/ladle/apacheds/apacheds-core-shared-1.0.2.jar +0 -0
  36. data/lib/ladle/apacheds/apacheds-kerberos-shared-1.0.2.jar +0 -0
  37. data/lib/ladle/apacheds/apacheds-protocol-changepw-1.0.2.jar +0 -0
  38. data/lib/ladle/apacheds/apacheds-protocol-kerberos-1.0.2.jar +0 -0
  39. data/lib/ladle/apacheds/apacheds-protocol-ldap-1.0.2.jar +0 -0
  40. data/lib/ladle/apacheds/apacheds-protocol-ntp-1.0.2.jar +0 -0
  41. data/lib/ladle/apacheds/apacheds-protocol-shared-1.0.2.jar +0 -0
  42. data/lib/ladle/apacheds/apacheds-server-jndi-1.0.2.jar +0 -0
  43. data/lib/ladle/apacheds/apacheds-server-main-1.0.2.jar +0 -0
  44. data/lib/ladle/apacheds/apacheds-server-ssl-1.0.2.jar +0 -0
  45. data/lib/ladle/apacheds/backport-util-concurrent-2.2.jar +0 -0
  46. data/lib/ladle/apacheds/commons-cli-1.0.jar +0 -0
  47. data/lib/ladle/apacheds/commons-collections-3.2.jar +0 -0
  48. data/lib/ladle/apacheds/commons-lang-2.1.jar +0 -0
  49. data/lib/ladle/apacheds/jcl-over-slf4j-1.5.6.jar +0 -0
  50. data/lib/ladle/apacheds/jdbm-1.0.jar +0 -0
  51. data/lib/ladle/apacheds/log4j-1.2.14.jar +0 -0
  52. data/lib/ladle/apacheds/mina-core-1.0.2.jar +0 -0
  53. data/lib/ladle/apacheds/mina-filter-ssl-1.0.2.jar +0 -0
  54. data/lib/ladle/apacheds/shared-asn1-0.9.5.5.jar +0 -0
  55. data/lib/ladle/apacheds/shared-asn1-codec-0.9.5.5.jar +0 -0
  56. data/lib/ladle/apacheds/shared-ldap-0.9.5.5.jar +0 -0
  57. data/lib/ladle/apacheds/slf4j-api-1.5.6.jar +0 -0
  58. data/lib/ladle/apacheds/slf4j-log4j12-1.5.6.jar +0 -0
  59. data/lib/ladle/apacheds/spring-beans-1.2.8.jar +0 -0
  60. data/lib/ladle/apacheds/spring-context-1.2.8.jar +0 -0
  61. data/lib/ladle/apacheds/spring-core-1.2.8.jar +0 -0
  62. data/lib/ladle/apacheds/xercesImpl-2.0.2.jar +0 -0
  63. data/spec/ladle/animal-schema.jar +0 -0
checksums.yaml CHANGED
@@ -0,0 +1,7 @@
+ ---
+ SHA1:
+   metadata.gz: b61678683a47fecc7521358a2530342a7b99b60d
+   data.tar.gz: c0e3a05741a6bd4a477abe9b7000458393f553c0
+ SHA512:
+   metadata.gz: 2b271a4617258ac619ec03c196a6d1ae8921e972379666cbcd21bbf2f30d33c9072a6f5396d1f27d3410e06ce5193822649109dc17f0e203700ee7598e43edc4
+   data.tar.gz: 925068ddf2291cfb97c63c2a931c0c87b8df0ea24ad98d17e8483aec48a415cc9e45dfec6e4dedf02532a421e9f732b9e41002b49641fa3496c3a0442e89a545
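The new checksums.yaml records SHA1 and SHA512 digests for the gem's two internal archives. As a minimal Ruby sketch (not part of the gem) of how a consumer might check an unpacked copy against it, assuming the .gem archive has been untarred and the checksums file decompressed into the current directory:

    require "digest"
    require "yaml"

    # Compare the recorded SHA512 digests against freshly computed ones.
    sums = YAML.load_file("checksums.yaml")

    %w(metadata.gz data.tar.gz).each do |name|
      actual = Digest::SHA512.file(name).hexdigest
      puts "#{name}: #{actual == sums["SHA512"][name] ? "ok" : "MISMATCH"}"
    end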
data/CHANGELOG.md CHANGED
@@ -1,3 +1,21 @@
+ 1.0.0
+ =====
+
+ - Updated to apacheDS 2.0.0-M16 from 1.0.2. (#14, #17, #18; @silarsis, @calavera, @http-418)
+ - BACKWARDS_INCOMPATIBLE CHANGE: Custom schemas are now defined in LDIF files,
+   not via the former baroque java system. See CUSTOM-SCHEMA.md for details. (#14, @silarsis)
+ - Upgraded dependency to net-ldap-0.3.1 (#14, @silarsis)
+ - Added pom.xml to simplify downloading future apacheDS updates (#18, @http-418)
+ - Avoid EOF error on `Server#stop`. (#12; @iRyusa)
+ - Drop support for Ruby 1.8.7.
+
+ 0.2.1
+ =====
+
+ - Improve error handling on newer versions of JRuby.
+ - Loosen open4 dependency for wider compatibility with other gems.
+ - Correct Cucumber snippets in readme. (#8)
+
  0.2.0
  =====
 
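Of these changes, the custom-schema switch is the one most likely to require action when upgrading. A rough before/after sketch in Ruby, using the option names that appear in the CUSTOM-SCHEMA.md diff below (the paths and class names are illustrative):

    # 0.2.x: custom schemas were compiled Java schema classes on the classpath.
    Ladle::Server.new(
      :additional_classpath => %w(path/to/sample-schema-1.0-SNAPSHOT.jar),
      :custom_schemas       => %w(com.example.schema.TestSchema),
      :ldif                 => "path/to/data.ldif",
      :domain               => "dc=example,dc=com"
    )

    # 1.0.0: custom schemas are plain LDIF files, loaded before the data LDIF.
    Ladle::Server.new(
      :custom_schemas => "path/to/schema.ldif",
      :ldif           => "path/to/data.ldif",
      :domain         => "dc=example,dc=com"
    )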
data/CUSTOM-SCHEMA.md CHANGED
@@ -1,61 +1,36 @@
  Custom Schemas in Ladle
  =======================
 
- If you need to use LDAP classes other the standard ones, you'll need
- to define and include a custom schema. There are three steps in this
- process:
+ If you need to use LDAP classes other than the standard ones, you'll need to define
+ and include a custom schema. As of version 2.0 of apacheDS, this is done via
+ ldif files. You can create the appropriate schema elements in a stand-alone ldif
+ file and specify that it be loaded prior to any data files.
 
- Create or obtain the schema in openldap format
- ----------------------------------------------
+ For an example of how this should look, please refer to
+ `spec/ladle/animals-custom-schema.ldif`.
 
- All the details are there in the step name.
+ `CN=other,OU=schema` is a good place to put your own custom attributes
+ and object types. There is a "test branch" starting with 2.25 which can
+ be used for self-generated oids, if you're making things up yourself
+ - check [this stackoverflow question][so] for more info.
 
- Generate the java representation of the schema
- ----------------------------------------------
-
- The embedded LDAP server in ladle is ApacheDS 1.0.2. That project
- provides [documentation][ds-custom] of how to build custom schemas;
- look at the section titled "Creating a Maven module for your custom
- schema." As you might guess from the title, you'll need [maven
- 2][mvn] to do this.
-
- The process has one snag -- after you generate the schema using
- `apacheds-schema-archetype.sh`, you'll need to modify the generated
- `pom.xml`. Under the this code:
-
-     <plugins>
-       <plugin>
-         <groupId>org.apache.directory.server</groupId>
-         <artifactId>apacheds-core-plugin</artifactId>
-
- Add the line:
-
-     <version>1.0.2</version>
-
- Then continue with the directions.
-
- [ds-custom]: http://directory.apache.org/apacheds/1.0/custom-schema.html.
- [mvn]: http://maven.apache.org/
+ [so]: http://stackoverflow.com/questions/725837/experimental-private-branch-for-oid-numbers-in-ldap-schemas
 
  Configure ladle to use the custom schema
  ----------------------------------------
 
- At the end of the java schema generation step, you'll have a jar file
- under `target` containing several classes representing the the schema.
- Put that jar somewhere in your project, then configure the
+ Put the ldif somewhere in your project, then configure the
  {Ladle::Server} instance to point to it:
 
      Ladle::Server.new(
-       :additional_classpath => %w(path/to/sample-schema-1.0-SNAPSHOT.jar),
-       :custom_schemas => %w(com.example.schema.TestSchema),
-       :ldif => "path/to/schema-using.ldif",
+       :custom_schemas => "path/to/schema.ldif",
+       :ldif => "path/to/data-that-uses-the-schema.ldif",
        :domain => "dc=example,dc=com"
      )
 
- The custom schema classname is derived from the first argument you
- passed to `apacheds-schema-archtype.sh` and the name of your schema
- file. In the example above, it's as if you ran
-
-     apacheds-schema-archetype.sh com.example.schema sample-schema
-
- And then named the schema file `test.schema`.
+ You may also combine the custom schema declarations in the data LDIF (the file
+ named by the `:ldif` option). If you do this, you can skip the `:custom_schemas`
+ option entirely. The separate `:custom_schemas` option is nice if you use the
+ same schema but different data in different tests, or if you use an externally-
+ provided schema. If your tests aren't that complicated, then combining them into
+ one file has no downsides.
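For the combined-file approach described in the closing paragraph above, the configuration collapses to a single `:ldif` option. A minimal sketch, with a made-up path and domain:

    # Schema entries and test data live in one LDIF file; :custom_schemas is omitted.
    server = Ladle::Server.new(
      :ldif   => "spec/ldap/schema-and-data.ldif",
      :domain => "dc=example,dc=com"
    ).start

    # ... exercise the LDAP client under test against the running server ...

    server.stop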
data/README.md CHANGED
@@ -9,12 +9,16 @@ It spins up an actual LDAP server instance, so you can use it to test
  any sort of client application &mdash; anything that communicates over
  the standard LDAP protocol.
 
- Ladle itself is tested on both JRuby 1.5.2 and Ruby 1.8.7 and 1.9.1.
+ Ladle itself is tested on both JRuby 1.7.11 and Ruby 1.9.3, 2.0.0, and 2.1.2.
  It is a wrapper around [ApacheDS][] (a pure-java embeddable LDAP
- server), so it needs Java 1.5 or later available whether you are using
+ server), so it needs Java 7 or later available whether you are using
  JRuby or not.
 
- [ApacheDS]: http://directory.apache.org/apacheds/1.0/index.html
+ Ladle will not work with MRI on Windows. (A pull request adding this support
+ would be eagerly reviewed.) It should work with JRuby on Windows, though this
+ hasn't been tested.
+
+ [ApacheDS]: http://directory.apache.org/apacheds/
 
  Ladle in 30 seconds
  -------------------
@@ -85,7 +89,7 @@ To use a server per test, use Cucumber's `Around` [hook][cucumber-hooks]:
  If you want just one server, consider something like this:
 
      Before('@ldap') do
-       @ladle ||= Ladle::Server.new(:quiet => true).start
+       $ladle ||= Ladle::Server.new(:quiet => true).start
      end
 
  This will start up a server for the first feature which needs it (and
@@ -95,7 +99,7 @@ end of the run. (Cucumber's hooks documentation notes that you would,
  in general, need to register an `at_exit` block for the process to be
  torn down at the end. {Ladle::Server#start} does this automatically.)
 
- [cucumber-hooks]: http://github.com/aslakhellesoy/cucumber/wiki/hooks
+ [cucumber-hooks]: http://github.com/cucumber/cucumber/wiki/hooks
 
  Test data
  ---------
@@ -121,9 +125,9 @@ provided by ApacheDS, you'll need to specify a custom schema. See
  Project links
  -------------
 
- * [API documentation](http://rubydoc.info/github/rsutphin/ladle/master/frames)
- * [Continuous integration](https://ctms-ci.nubic.northwestern.edu/hudson/job/ladle/)
- * [Issue tracking](http://github.com/rsutphin/ladle/issues)
+ * [API documentation](http://rubydoc.info/github/NUBIC/ladle/master/frames)
+ * [Continuous integration](https://travis-ci.org/NUBIC/ladle)
+ * [Issue tracking](http://github.com/NUBIC/ladle/issues)
 
  Non-issue questions can be sent to rhett@detailedbalance.net.
 
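The hunks above only show the shared-server `Before('@ldap')` variant; for the per-scenario `Around` hook the README refers to, a sketch along these lines should work (the tag and options are illustrative, not copied from the README):

    # One server per tagged scenario: started before the scenario, stopped after it.
    Around('@ldap') do |scenario, block|
      server = Ladle::Server.new(:quiet => true).start
      begin
        block.call
      ensure
        server.stop
      end
    end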
data/lib/ladle/java/log4j-quiet.properties CHANGED
@@ -0,0 +1,8 @@
+ # Root logger option
+ log4j.rootLogger=ERROR, stdout
+
+ # Direct log messages to stdout
+ log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+ log4j.appender.stdout.Target=System.out
+ log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+ log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n
data/lib/ladle/java/net/detailedbalance/ladle/Main.java CHANGED
@@ -18,6 +18,7 @@ import java.io.InputStreamReader;
  import java.io.PrintWriter;
  import java.util.ArrayList;
  import java.util.Arrays;
+ import java.util.Collections;
  import java.util.List;
 
  /**
@@ -39,20 +40,19 @@ public class Main {
              behaveBadly(commandLine.getOptionValue('F'));
          }
 
+         List<String> schemaFileNames;
+         if (commandLine.hasOption('S')) {
+             schemaFileNames = Arrays.asList(commandLine.getOptionValue('S').split(","));
+         } else {
+             schemaFileNames = Collections.emptyList();
+         }
+
          final Server s = new Server(
              new Integer(commandLine.getOptionValue("p")),
              commandLine.getOptionValue("d"),
              new File(commandLine.getOptionValue("l")),
              new File(commandLine.getOptionValue('t')),
-             !commandLine.hasOption('A'));
-         if (commandLine.hasOption('S')) {
-             List<String> schemaClassNames = Arrays.asList(commandLine.getOptionValue('S').split(","));
-             List<Class<?>> schemaClasses = new ArrayList<Class<?>>(schemaClassNames.size());
-             for (String schemaClassName : schemaClassNames) {
-                 schemaClasses.add(Class.forName(schemaClassName));
-             }
-             s.setCustomSchemas(schemaClasses);
-         }
+             !commandLine.hasOption('A'), schemaFileNames);
 
          Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
              public void run() {
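This hunk changes the meaning of the `-S` option from a comma-separated list of schema class names to a comma-separated list of schema LDIF paths. As a hypothetical illustration of how a caller (for example the gem's Ruby wrapper) might assemble the Java command line, where the option letters come from Main.java and every value is invented:

    schema_files = %w(spec/ldap/animals-schema.ldif spec/ldap/plants-schema.ldif)

    java_args = [
      "-p", "3897",                # LDAP port (example value)
      "-d", "dc=example,dc=com",   # domain component
      "-l", "spec/ldap/data.ldif", # data LDIF
      "-t", "tmp/ladle",           # temp directory base
      "-S", schema_files.join(",") # custom schema LDIFs, comma-separated
    ]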
data/lib/ladle/java/net/detailedbalance/ladle/Server.java CHANGED
@@ -1,27 +1,47 @@
  package net.detailedbalance.ladle;
 
  import org.apache.commons.io.FileUtils;
- import org.apache.directory.server.configuration.MutableServerStartupConfiguration;
- import org.apache.directory.server.core.configuration.Configuration;
- import org.apache.directory.server.core.configuration.MutablePartitionConfiguration;
- import org.apache.directory.server.core.configuration.ShutdownConfiguration;
- import org.apache.directory.server.core.schema.bootstrap.BootstrapSchema;
+ import org.apache.directory.api.ldap.model.entry.Entry;
+ import org.apache.directory.api.ldap.model.entry.DefaultEntry;
+ import org.apache.directory.api.ldap.model.exception.LdapException;
+ import org.apache.directory.api.ldap.model.name.Dn;
+ import org.apache.directory.api.ldap.model.schema.SchemaManager;
+ import org.apache.directory.api.ldap.model.schema.registries.SchemaLoader;
+ import org.apache.directory.api.ldap.model.ldif.LdifReader;
+ import org.apache.directory.api.ldap.model.ldif.LdifEntry;
+ import org.apache.directory.api.ldap.schemaextractor.SchemaLdifExtractor;
+ import org.apache.directory.api.ldap.schemaextractor.impl.DefaultSchemaLdifExtractor;
+ import org.apache.directory.api.ldap.schemaloader.LdifSchemaLoader;
+ import org.apache.directory.api.ldap.schemamanager.impl.DefaultSchemaManager;
+ import org.apache.directory.api.util.exception.Exceptions;
+ import org.apache.directory.server.constants.ServerDNConstants;
+ import org.apache.directory.server.core.DefaultDirectoryService;
+ import org.apache.directory.server.core.api.CacheService;
+ import org.apache.directory.server.core.api.DirectoryService;
+ import org.apache.directory.server.core.api.DnFactory;
+ import org.apache.directory.server.core.api.InstanceLayout;
+ import org.apache.directory.server.core.api.partition.Partition;
+ import org.apache.directory.server.core.api.schema.SchemaPartition;
+ import org.apache.directory.server.core.partition.impl.btree.jdbm.JdbmIndex;
+ import org.apache.directory.server.core.partition.impl.btree.jdbm.JdbmPartition;
+ import org.apache.directory.server.core.partition.ldif.LdifPartition;
+ import org.apache.directory.server.i18n.I18n;
+ import org.apache.directory.server.ldap.LdapServer;
+ import org.apache.directory.server.protocol.shared.transport.TcpTransport;
  import org.apache.log4j.Logger;
 
  import javax.naming.Context;
  import javax.naming.NamingException;
- import javax.naming.directory.Attribute;
- import javax.naming.directory.Attributes;
- import javax.naming.directory.BasicAttribute;
- import javax.naming.directory.BasicAttributes;
- import javax.naming.directory.InitialDirContext;
  import java.io.File;
  import java.io.IOException;
+ import java.io.InputStream;
+ import java.io.FileInputStream;
  import java.util.Collection;
  import java.util.Collections;
  import java.util.HashSet;
  import java.util.Hashtable;
  import java.util.Set;
+ import java.util.List;
  import java.util.UUID;
 
  /**
@@ -31,6 +51,9 @@ import java.util.UUID;
   * <p>
   * The idea of using ApacheDS for this was from Spring Security's LDAP test support. The details
   * are from the ApacheDS embedding and unit testing documentation.
+  * <p>
+  * This version uses ApacheDS 2.0 and is based on
+  * http://svn.apache.org/repos/asf/directory/sandbox/kayyagari/embedded-sample-trunk/src/main/java/org/apache/directory/seserver/EmbeddedADSVerTrunk.java
   */
  public class Server {
      private final Logger log = Logger.getLogger(getClass());
@@ -39,25 +62,30 @@ public class Server {
      private final String domainComponent;
      private final boolean allowAnonymous;
      private final File tempDir;
-     private final File ldifDir;
+     private final String ldifFileName;
      private boolean running = false;
-     private Collection<Class<?>> customSchemas = Collections.emptyList();
+     private Collection<String> customSchemaFilenames;
+
+     private DirectoryService service;
+     private LdapServer ldapServer;
 
      public Server(
-         int port, String domainComponent, File ldifFile, File tempDirBase, boolean allowAnonymous
+         int port, String domainComponent, File ldifFile, File tempDirBase, boolean allowAnonymous,
+         Collection<String> customSchemaFilenames
      ) {
          this.port = port;
          this.domainComponent = domainComponent;
          this.allowAnonymous = allowAnonymous;
          this.tempDir = createTempDir(tempDirBase);
-         this.ldifDir = prepareLdif(ldifFile);
+         this.ldifFileName = ldifFile.getPath();
+         this.customSchemaFilenames = customSchemaFilenames;
      }
 
      ////// SETUP
 
      private File createTempDir(File tempDirBase) {
          File temp = new File(tempDirBase, "ladle-server-" + UUID.randomUUID());
-
+
 
          if (temp.mkdir()) {
              return temp;
@@ -79,47 +107,66 @@
          return env;
      }
 
-     private File prepareLdif(File ldifFile) {
-         File dir = new File(tempDir, "ldif");
-         if (!dir.mkdir()) {
-             throw new LadleFatalException("Could not create LDIF directory " + dir);
-         }
+     ////// RUNNING
+
+     @SuppressWarnings(value={"unchecked"})
+     public void start() throws Exception {
+         if (running) return;
 
          try {
-             FileUtils.copyFileToDirectory(ldifFile, dir);
-         } catch (IOException e) {
-             throw new LadleFatalException("Copying " + ldifFile + " to " + dir + " failed.", e);
-         }
+             // Initialize the LDAP service
+             service = new DefaultDirectoryService();
+             service.setInstanceLayout( new InstanceLayout( tempDir ) );
 
-         return dir;
-     }
+             CacheService cacheService = new CacheService();
+             cacheService.initialize( service.getInstanceLayout() );
 
-     ////// RUNNING
+             service.setCacheService( cacheService );
 
-     @SuppressWarnings({"unchecked"})
-     public void start() {
-         if (running) return;
+             // first load the schema
+             initSchemaPartition();
 
-         try {
-             MutableServerStartupConfiguration cfg = new MutableServerStartupConfiguration();
-             cfg.setWorkingDirectory(tempDir);
-             cfg.setLdifDirectory(ldifDir);
-             cfg.setEnableNetworking(true);
-             cfg.setLdapPort(port);
-             cfg.setAllowAnonymousAccess(allowAnonymous);
-             cfg.setAccessControlEnabled(false);
-             cfg.setShutdownHookEnabled(false);
-             cfg.setContextPartitionConfigurations(
-                 Collections.singleton(createPartitionConfiguration()));
-             if (!customSchemas.isEmpty()) {
-                 Set<BootstrapSchema> schemas = cfg.getBootstrapSchemas();
-                 for (Class<?> customSchemaClass : customSchemas) {
-                     schemas.add((BootstrapSchema) customSchemaClass.newInstance());
-                 }
-                 cfg.setBootstrapSchemas(schemas);
+             // then the system partition
+             initSystemPartition();
+
+             // Disable the ChangeLog system
+             service.getChangeLog().setEnabled( false );
+             service.setDenormalizeOpAttrsEnabled( true );
+
+             // Now we can create as many partitions as we need
+             Partition ladlePartition = addPartition( "ladle", domainComponent, service.getDnFactory() );
+
+             // Setup indexes, access rules, and start it up
+             addIndex( ladlePartition, "objectClass", "ou", "dc", "uid" );
+             service.setAllowAnonymousAccess( allowAnonymous );
+             service.startup();
+
+             // Inject the context entry for the partition if it does not already exist
+             try
+             {
+                 service.getAdminSession().lookup( ladlePartition.getSuffixDn() );
+             }
+             catch ( LdapException lnnfe )
+             {
+                 Dn userDN = new Dn( domainComponent );
+                 Entry userEntry = service.newEntry( userDN );
+                 userEntry.add( "objectClass", "top", "domain", "extensibleObject" );
+                 userEntry.add( "dc", domainComponent.split(",")[0].substring(3) );
+                 service.getAdminSession().add( userEntry );
+             }
+
+             // Load up any extra data
+             for (String schemaFileName : customSchemaFilenames) {
+                 loadLDIF(schemaFileName);
              }
+             loadLDIF(ldifFileName);
 
-             new InitialDirContext(createJndiEnvironment(cfg));
+             // Now create the LDAP server and transport for the Directory Service.
+             ldapServer = new LdapServer();
+             ldapServer.setDirectoryService( service );
+             TcpTransport ldapTransport = new TcpTransport( port );
+             ldapServer.setTransports( ldapTransport );
+             ldapServer.start();
          } catch (NamingException e) {
              throw new LadleFatalException("Startup failed", e);
          } catch (InstantiationException e) {
@@ -131,49 +178,139 @@
          running = true;
      }
 
-     // Derived from http://directory.apache.org/apacheds/1.0/using-apacheds-for-unit-tests.html
-     private MutablePartitionConfiguration createPartitionConfiguration() throws NamingException {
-         MutablePartitionConfiguration pCfg = new MutablePartitionConfiguration();
-         pCfg.setName("ladle");
-         pCfg.setSuffix(domainComponent);
+     public void loadLDIF(String filepath) throws Exception {
+
+         log.info("Loading : " + filepath);
+
+         if (!service.isStarted()) {
+             throw new Exception("Directory service not started");
+         } else {
+             InputStream inputStream = null;
+             SchemaManager schemaManager = service.getSchemaManager();
+             try {
+                 inputStream = new FileInputStream(filepath);
+                 if (inputStream != null) {
+                     LdifReader entries = new LdifReader(inputStream);
+                     for (LdifEntry ldifEntry : entries) {
+                         DefaultEntry newEntry = new DefaultEntry(schemaManager, ldifEntry.getEntry());
+                         service.getAdminSession().add( newEntry );
+                     }
+                 }
+             } finally {
+                 if (inputStream != null) inputStream.close();
+             }
+         }
+     }
+
+     /**
+      * Add a new partition to the server
+      *
+      * @param partitionId The partition Id
+      * @param partitionDn The partition DN
+      * @param dnFactory The DN factory
+      * @return The newly added partition
+      * @throws Exception If the partition can't be added
+      */
+     private Partition addPartition( String partitionId, String partitionDn, DnFactory dnFactory ) throws Exception
+     {
+         // Create a new partition with the given partition id
+         JdbmPartition partition = new JdbmPartition(service.getSchemaManager(), dnFactory);
+         partition.setId( partitionId );
+         partition.setPartitionPath( new File( service.getInstanceLayout().getPartitionsDirectory(), partitionId ).toURI() );
+         partition.setSuffixDn( new Dn( partitionDn ) );
+         service.addPartition( partition );
+
+         return partition;
+     }
+
+     /**
+      * initialize the schema manager and add the schema partition to directory service
+      *
+      * @throws Exception if the schema LDIF files are not found on the classpath
+      */
+     private void initSchemaPartition() throws Exception
+     {
+         InstanceLayout instanceLayout = service.getInstanceLayout();
 
-         Set<String> indexedAttrs = new HashSet<String>();
-         indexedAttrs.add("objectClass");
-         indexedAttrs.add("dc");
-         indexedAttrs.add("uid");
-         pCfg.setIndexedAttributes( indexedAttrs );
+         File schemaPartitionDirectory = new File( instanceLayout.getPartitionsDirectory(), "schema" );
 
-         // Create the root entry
+         // Extract the schema on disk (a brand new one) and load the registries
+         if ( schemaPartitionDirectory.exists() )
          {
-             Attributes attrs = new BasicAttributes(true);
+             log.warn( "schema partition already exists, skipping schema extraction" );
+         }
+         else
+         {
+             SchemaLdifExtractor extractor = new DefaultSchemaLdifExtractor( instanceLayout.getPartitionsDirectory() );
+             extractor.extractOrCopy();
+         }
 
-             Attribute attr = new BasicAttribute("objectClass");
-             attr.add("top");
-             attr.add("domain");
-             attrs.put(attr);
+         SchemaLoader loader = new LdifSchemaLoader( schemaPartitionDirectory );
+         SchemaManager schemaManager = new DefaultSchemaManager( loader );
 
-             attr = new BasicAttribute("dc");
-             attr.add(domainComponent.split(",")[0].substring(3));
-             attrs.put(attr);
+         // We have to load the schema now, otherwise we won't be able
+         // to initialize the Partitions, as we won't be able to parse
+         // and normalize their suffix Dn
+         schemaManager.loadAllEnabled();
 
-             pCfg.setContextEntry(attrs);
+         List<Throwable> errors = schemaManager.getErrors();
+
+         if ( errors.size() != 0 )
+         {
+             throw new Exception( I18n.err( I18n.ERR_317, Exceptions.printErrors( errors ) ) );
          }
 
-         return pCfg;
+         service.setSchemaManager( schemaManager );
+
+         // Init the LdifPartition with schema
+         LdifPartition schemaLdifPartition = new LdifPartition( schemaManager, service.getDnFactory() );
+         schemaLdifPartition.setPartitionPath( schemaPartitionDirectory.toURI() );
+
+         // The schema partition
+         SchemaPartition schemaPartition = new SchemaPartition( schemaManager );
+         schemaPartition.setWrappedPartition( schemaLdifPartition );
+         service.setSchemaPartition( schemaPartition );
      }
 
-     @SuppressWarnings({ "unchecked" })
-     private Hashtable<String, String> createJndiEnvironment(Configuration cfg) {
-         Hashtable<String, String> env = baseEnvironment();
-         env.putAll(cfg.toJndiEnvironment());
-         return env;
+     private void initSystemPartition() throws Exception {
+         // this is a MANDATORY partition
+         // DO NOT add this via addPartition() method, trunk code complains about duplicate partition
+         // while initializing
+         JdbmPartition systemPartition = new JdbmPartition( service.getSchemaManager(), service.getDnFactory() );
+         systemPartition.setId( "system" );
+         systemPartition.setPartitionPath( new File( service.getInstanceLayout().getPartitionsDirectory(), systemPartition.getId() ).toURI() );
+         systemPartition.setSuffixDn( new Dn( ServerDNConstants.SYSTEM_DN ) );
+         systemPartition.setSchemaManager( service.getSchemaManager() );
+
+         // mandatory to call this method to set the system partition
+         // Note: this system partition might be removed from trunk
+         service.setSystemPartition( systemPartition );
+     }
+
+     /**
+      * Add a new set of index on the given attributes
+      *
+      * @param partition The partition on which we want to add index
+      * @param attrs The list of attributes to index
+      */
+     private void addIndex( Partition partition, String... attrs )
+     {
+         // Index some attributes on the apache partition
+         Set indexedAttributes = new HashSet();
+
+         for ( String attribute : attrs )
+         {
+             indexedAttributes.add( new JdbmIndex( attribute, false ) );
+         }
+
+         ( ( JdbmPartition ) partition ).setIndexedAttributes( indexedAttributes );
      }
 
-     public void stop() {
+     public void stop() throws LadleFatalException {
          if (!running) return;
          try {
-             new InitialDirContext(createJndiEnvironment(new ShutdownConfiguration()));
-         } catch (NamingException e) {
+             service.shutdown();
+         } catch (Exception e) {
              throw new LadleFatalException("Shutdown failed", e);
          }
          running = false;
@@ -186,8 +323,4 @@
              }
          }
      }
-
-     public void setCustomSchemas(Collection<Class<?>> customSchemas) {
-         this.customSchemas = customSchemas;
-     }
  }