I've encountered a serious problem with the following code under Windows:
#include <Core/Core.h>
#include <occi.h>

CONSOLE_APP_MAIN
{
	std::string rs;
	{
		oracle::occi::Environment *env = oracle::occi::Environment::createEnvironment(oracle::occi::Environment::DEFAULT);
		oracle::occi::Connection *con = env->createConnection("Scott", "Tiger",
			"(DESCRIPTION=(ADDRESS_LIST=(ADDRESS=(PROTOCOL=TCP)(HOST=192.168.1.1)(PORT=1521)))(CONNECT_DATA=(SERVICE_NAME=test.upp)))");
		oracle::occi::Statement *stmt = con->createStatement("SELECT 'ABC' FROM DUAL");
		oracle::occi::ResultSet *rset = stmt->executeQuery();
		if(rset->next()) {
			rs = rset->getString(1);
		} // LEAVING THIS SCOPE LEADS TO ERROR!
		stmt->closeResultSet(rset);
		env->terminateConnection(con);
		oracle::occi::Environment::terminateEnvironment(env);
	}
	if(rs.length()) {
	}
}
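If it helps, here is my guess at the mechanism, as a minimal two-file sketch. MakeStringInDll is a hypothetical stand-in for ResultSet::getString; this only illustrates the allocator mismatch I suspect, it is not the actual OCCI source:

// occi_side.cpp -- imagine this compiled into a DLL against the plain
// MSVC CRT, the way oraocci11.dll is built:
#include <string>

std::string MakeStringInDll()
{
	return std::string("ABC"); // buffer allocated on the DLL's CRT heap
}

// app_side.cpp -- compiled against U++ Core, whose replaced global
// operator delete is what the returned temporary's destructor calls:
#include <Core/Core.h>

std::string MakeStringInDll(); // imported from the DLL above

CONSOLE_APP_MAIN
{
	std::string rs;
	rs = MakeStringInDll(); // the copy into rs is fine, but destroying
	                        // the temporary hands the DLL-allocated
	                        // buffer to U++'s operator delete -> mismatch
}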
ERROR (running in debug):
This test scenario actually only fails in debug builds; so far I have not found a simple reproduction for release mode. (I assume the debug heap checks catch the foreign pointer immediately, while in release the mismatch simply goes unnoticed.)
What am I doing wrong here?
Why does Ultimate++ try to free memory that the OCCI library allocated using the MSVC CRT?
Am I missing something? Can you give me some hints?
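For context, my understanding is that linking U++ Core replaces the global allocation operators for the whole module, roughly like this (a simplified sketch with hypothetical stand-in functions, not the actual Core source):

// upp_heap_sketch.cpp -- simplified illustration of the replaced
// global operators; MyHeapAlloc/MyHeapFree are hypothetical stand-ins
// for U++'s internal heap (which is not just malloc/free):
#include <cstddef>
#include <cstdlib>

static void *MyHeapAlloc(std::size_t size) { return std::malloc(size); }
static void  MyHeapFree(void *ptr)         { std::free(ptr); }

void *operator new(std::size_t size) { return MyHeapAlloc(size); }
void  operator delete(void *ptr) throw() { MyHeapFree(ptr); }

// Consequence: a buffer that the MSVC CRT inside oraocci11.dll
// allocated eventually reaches MyHeapFree, which has never seen that
// pointer -- exactly what the debug heap then complains about.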
Kind regards
wqcmaster
P.S.: Using UPP version 5485; Visual Studio 2010; Oracle OCCI Library 11.2 with VC10