Problem writing a large number of records to Berkeley DB
I am using Berkeley DB JE 4.1.10 with the secondary-database concept, and I want to insert 1,000,000 (10,00,000) rows into the database. I am getting the following error:

Exception in thread "main" com.sleepycat.je.EnvironmentFailureException: (JE 4.1.10) JAVA_ERROR: Java Error occurred, recovery may not be possible.
    at com.sleepycat.je.dbi.EnvironmentImpl.checkIfInvalid(EnvironmentImpl.java:1450)
    at com.sleepycat.je.Transaction.checkEnv(Transaction.java:756)
    at com.sleepycat.je.Transaction.abort(Transaction.java:105)
    at WriteDataUsingCursor.main(WriteDataUsingCursor.java:90)
Caused by: java.lang.OutOfMemoryError: Java heap space

I have set the following parameters for the environment and database:

Environment env = null;
Database primarydb = null;
SecondaryDatabase secondarydb = null;
@SuppressWarnings("rawtypes")
TupleBinding mybinding = null;
String firstdb = "CDRFirstDatabase";
String seconddb = "CDRSecondDatabase";

public void setupenvironment() {
    try {
        // Environment
        File dbpath = new File("W:/dhananjay/workspace/BDB-SecondaryDatabase(JEAPI)-SmaxCDR/src/DB1");
        EnvironmentConfig envconfig = new EnvironmentConfig();
        envconfig.setAllowCreate(true);
        envconfig.setTransactional(true);
        env = new Environment(dbpath, envconfig);

        // Primary Database
        DatabaseConfig dbconfig = new DatabaseConfig();
        dbconfig.setAllowCreate(true);
        dbconfig.setTransactional(true);
        dbconfig.setSortedDuplicates(false);
        primarydb = env.openDatabase(null, firstdb, dbconfig);

        // Secondary Database
        SecondaryConfig secondarydbconfig = new SecondaryConfig();
        secondarydbconfig.setAllowCreate(true);
        secondarydbconfig.setTransactional(true);
        secondarydbconfig.setSortedDuplicates(true);

        // Create Key for secondary DB
        mybinding = new MyTupleBinding();
        SecondaryKey keycreator = new SecondaryKey(mybinding);
        secondarydbconfig.setKeyCreator(keycreator);
        secondarydb = env.openSecondaryDatabase(null, seconddb, primarydb, secondarydbconfig);
    } catch (Exception e) {
        System.out.println("Error----->" + e);
    }
}

I am using the following code
to insert the data into the database (the snippet below is incomplete as posted — the closing catch block is cut off):

DBEnvironment dbenv = new DBEnvironment();
DatabaseEntry theKey = new DatabaseEntry();
DatabaseEntry theData = new DatabaseEntry();

@SuppressWarnings("unchecked")
public static void main(String[] args) {
    Cursor cursor = null;
    Transaction txn = null;
    dbenv.setupenvironment();
    try {
        txn = dbenv.getEnv().beginTransaction(null, null);
        cursor = dbenv.getPrimaryDB().openCursor(txn, null);
        String line = null;
        BufferedReader br = new BufferedReader(
                new FileReader("W:/dhananjay/workspace/SMAXCDR/110618_154501.cdr"));
        long startTime = System.currentTimeMillis();
        while ((line = br.readLine()) != null) {
            String[] sArray = line.split(",");
            CDRData cdrfile = new CDRData();
            theKey = new DatabaseEntry(sArray[0].getBytes("UTF-8"));
            cdrfile.setId(sArray[0]);
            cdrfile.setCallTime(sArray[1]);
            cdrfile.setDisconnectTime(sArray[2]);
            cdrfile.setIngressTrunkID(sArray[3]);
            cdrfile.setIngressSignalingIP(sArray[4]);
            cdrfile.setIngressMediaIP(sArray[5]);
            cdrfile.setEgressTrunkID(sArray[6]);
            cdrfile.setEgressSignalingIP(sArray[7]);
            cdrfile.setEgressMediaIP(sArray[8]);
            cdrfile.setANI(sArray[9]);
            cdrfile.setDNID(sArray[10]);
            cdrfile.setDNID_with_IngressPrefix(sArray[11]);
            cdrfile.setDNID_with_EgressPrefix(sArray[12]);
            cdrfile.setCodeMatch(sArray[13]);
            cdrfile.setProtocol(sArray[14]);
            cdrfile.setCallID(sArray[15]);
            cdrfile.setSMaxCallID(sArray[16]);
            cdrfile.setRingDuration(sArray[17]);
            cdrfile.setPDDDuration(sArray[18]);
            cdrfile.setRawDuration(sArray[19]);
            cdrfile.setCauseCode(sArray[20]);
            cdrfile.setTFlag(sArray[21]);
            cdrfile.setRFlag(sArray[22]);
            dbenv.getBinding().objectToEntry(cdrfile, theData);
            cursor.put(theKey, theData);
        }
        cursor.close();
        txn.commit();
        dbenv.close();
        long finishTime = System.currentTimeMillis();
        System.out.println("The time taken by insert query : " + (finishTime - startTime) + " ms");
        System.out.println("Record Inserted");
    }
Cursors and transactions hold on to memory until they are closed or committed, so doing all 1,000,000 inserts inside a single transaction exhausts the heap. Insert in batches instead — perhaps 1,000 records each: for each batch, close the cursor when done, commit the transaction, and then open a new cursor and transaction for the next batch.
Featured comments