TEST: add basic postgresql tests #6316

Merged
1 change: 1 addition & 0 deletions .travis.yml
@@ -73,6 +73,7 @@ install:

before_script:
- mysql -e 'create database pandas_nosetest;'
- psql -c 'create database pandas_nosetest;' -U postgres

script:
- echo "script"
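The new before_script line provisions the PostgreSQL test database on Travis the same way the existing mysql line does for MySQL; the tests added below then connect to it as the passwordless postgres superuser. A minimal sketch for checking that setup locally, assuming a running PostgreSQL server with the same passwordless postgres role and sqlalchemy/psycopg2 installed:

# Sketch only: confirm the pandas_nosetest database is reachable the way the
# new tests reach it. The host/role layout mirrors Travis and is an assumption
# for local use.
import sqlalchemy

engine = sqlalchemy.create_engine(
    'postgresql+psycopg2://postgres@localhost/pandas_nosetest')
conn = engine.connect()
print(conn.execute('SELECT 1').scalar())  # prints 1 if the database exists
conn.close()
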
1 change: 1 addition & 0 deletions ci/requirements-2.7.txt
@@ -19,3 +19,4 @@ beautifulsoup4==4.2.1
statsmodels==0.5.0
bigquery==2.0.17
sqlalchemy==0.8.1
psycopg2==2.5.2
1 change: 1 addition & 0 deletions ci/requirements-3.3.txt
@@ -15,3 +15,4 @@ scipy==0.12.0
beautifulsoup4==4.2.1
statsmodels==0.4.3
sqlalchemy==0.9.1
psycopg2==2.5.2
162 changes: 120 additions & 42 deletions pandas/io/tests/test_sql.py
@@ -36,11 +36,19 @@
`PetalLength` DOUBLE,
`PetalWidth` DOUBLE,
`Name` VARCHAR(200)
)""",
'postgresql': """CREATE TABLE iris (
"SepalLength" DOUBLE PRECISION,
"SepalWidth" DOUBLE PRECISION,
"PetalLength" DOUBLE PRECISION,
"PetalWidth" DOUBLE PRECISION,
"Name" VARCHAR(200)
)"""
},
'insert_iris': {
'sqlite': """INSERT INTO iris VALUES(?, ?, ?, ?, ?)""",
-    'mysql': """INSERT INTO iris VALUES(%s, %s, %s, %s, "%s");"""
+    'mysql': """INSERT INTO iris VALUES(%s, %s, %s, %s, "%s");""",
+    'postgresql': """INSERT INTO iris VALUES(%s, %s, %s, %s, %s);"""
},
'create_test_types': {
'sqlite': """CREATE TABLE types_test_data (
@@ -62,6 +70,16 @@
`BoolCol` BOOLEAN,
`IntColWithNull` INTEGER,
`BoolColWithNull` BOOLEAN
)""",
'postgresql': """CREATE TABLE types_test_data (
"TextCol" TEXT,
"DateCol" TIMESTAMP,
"IntDateCol" INTEGER,
"FloatCol" DOUBLE PRECISION,
"IntCol" INTEGER,
"BoolCol" BOOLEAN,
"IntColWithNull" INTEGER,
"BoolColWithNull" BOOLEAN
)"""
},
'insert_test_types': {
@@ -72,6 +90,10 @@
'mysql': """
INSERT INTO types_test_data
VALUES("%s", %s, %s, %s, %s, %s, %s, %s)
""",
'postgresql': """
INSERT INTO types_test_data
VALUES(%s, %s, %s, %s, %s, %s, %s, %s)
"""
}
}
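The postgresql templates above double-quote the column names, because PostgreSQL folds unquoted identifiers to lower case and the tests use mixed-case names, and they use %s placeholders because that is the parameter style psycopg2 expects on a raw DBAPI connection. A minimal sketch of how such a template is executed through psycopg2; the connection arguments and the sample row are assumptions for illustration:

# Sketch only: executing the parameterized postgresql insert above on a raw
# psycopg2 connection. Connection arguments and the example row are assumed.
import psycopg2

conn = psycopg2.connect(host='localhost', user='postgres',
                        dbname='pandas_nosetest')
cur = conn.cursor()
cur.execute("INSERT INTO iris VALUES(%s, %s, %s, %s, %s);",
            (5.1, 3.5, 1.4, 0.2, 'Iris-setosa'))
conn.commit()
cur.close()
conn.close()
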
@@ -403,29 +425,12 @@ def test_date_and_index(self):
"IntDateCol loaded with incorrect type")


-class TestSQLAlchemy(PandasSQLTest):
-
-    '''
-    Test the sqlalchemy backend against an in-memory sqlite database.
-    Assume that sqlalchemy takes case of the DB specifics
-    '''
-    flavor = 'sqlite'
-
-    def connect(self):
-        return sqlalchemy.create_engine('sqlite:///:memory:')
-
-    def setUp(self):
-        # Skip this test if SQLAlchemy not available
-        if not SQLALCHEMY_INSTALLED:
-            raise nose.SkipTest('SQLAlchemy not installed')
-
-        self.conn = self.connect()
-        self.pandasSQL = sql.PandasSQLAlchemy(self.conn)
-
-        self._load_iris_data()
-        self._load_raw_sql()
-
-        self._load_test1_data()
+class _TestSQLAlchemy(PandasSQLTest):
+    """
+    Base class for testing the sqlalchemy backend. Subclasses for specific
+    database types are created below.
+    """

def test_read_sql(self):
self._read_sql_iris()
@@ -491,32 +496,31 @@ def test_read_table_absent(self):
ValueError, sql.read_table, "this_doesnt_exist", con=self.conn)

    def test_default_type_convertion(self):
-        """ Test default type conversion"""
        df = sql.read_table("types_test_data", self.conn)
-        self.assertTrue(
-            issubclass(df.FloatCol.dtype.type, np.floating), "FloatCol loaded with incorrect type")
-        self.assertTrue(
-            issubclass(df.IntCol.dtype.type, np.integer), "IntCol loaded with incorrect type")
-        self.assertTrue(
-            issubclass(df.BoolCol.dtype.type, np.integer), "BoolCol loaded with incorrect type")

+        self.assertTrue(issubclass(df.FloatCol.dtype.type, np.floating),
+                        "FloatCol loaded with incorrect type")
+        self.assertTrue(issubclass(df.IntCol.dtype.type, np.integer),
+                        "IntCol loaded with incorrect type")
+        self.assertTrue(issubclass(df.BoolCol.dtype.type, np.bool_),
+                        "BoolCol loaded with incorrect type")

        # Int column with NA values stays as float
        self.assertTrue(issubclass(df.IntColWithNull.dtype.type, np.floating),
                        "IntColWithNull loaded with incorrect type")
-        # Non-native Bool column with NA values stays as float
-        self.assertTrue(
-            issubclass(df.BoolColWithNull.dtype.type, np.floating), "BoolCol loaded with incorrect type")
+        # Bool column with NA values becomes object
+        self.assertTrue(issubclass(df.BoolColWithNull.dtype.type, np.object),
+                        "BoolColWithNull loaded with incorrect type")

def test_default_date_load(self):
df = sql.read_table("types_test_data", self.conn)

-        # IMPORTANT - sqlite has no native date type, so shouldn't parse, but
-        # MySQL SHOULD be converted.
-        self.assertFalse(
+        self.assertTrue(
issubclass(df.DateCol.dtype.type, np.datetime64), "DateCol loaded with incorrect type")

def test_date_parsing(self):
""" Test date parsing """
# No Parsing
df = sql.read_table("types_test_data", self.conn)

@@ -551,6 +555,54 @@ def test_date_parsing(self):
"IntDateCol loaded with incorrect type")


class TestSQLAlchemy(_TestSQLAlchemy):
"""
Test the sqlalchemy backend against an in-memory sqlite database.
"""
flavor = 'sqlite'

def connect(self):
return sqlalchemy.create_engine('sqlite:///:memory:')

def setUp(self):
# Skip this test if SQLAlchemy not available
if not SQLALCHEMY_INSTALLED:
raise nose.SkipTest('SQLAlchemy not installed')

self.conn = self.connect()
self.pandasSQL = sql.PandasSQLAlchemy(self.conn)

self._load_iris_data()
self._load_raw_sql()

self._load_test1_data()

def test_default_type_convertion(self):
df = sql.read_table("types_test_data", self.conn)

self.assertTrue(issubclass(df.FloatCol.dtype.type, np.floating),
"FloatCol loaded with incorrect type")
self.assertTrue(issubclass(df.IntCol.dtype.type, np.integer),
"IntCol loaded with incorrect type")
# sqlite has no boolean type, so integer type is returned
self.assertTrue(issubclass(df.BoolCol.dtype.type, np.integer),
"BoolCol loaded with incorrect type")

# Int column with NA values stays as float
self.assertTrue(issubclass(df.IntColWithNull.dtype.type, np.floating),
"IntColWithNull loaded with incorrect type")
# Non-native Bool column with NA values stays as float
self.assertTrue(issubclass(df.BoolColWithNull.dtype.type, np.floating),
"BoolColWithNull loaded with incorrect type")

def test_default_date_load(self):
df = sql.read_table("types_test_data", self.conn)

# IMPORTANT - sqlite has no native date type, so shouldn't parse, but
self.assertFalse(issubclass(df.DateCol.dtype.type, np.datetime64),
"DateCol loaded with incorrect type")

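The sqlite overrides above encode pandas' missing-value promotions: an integer column containing NULLs comes back as float (NaN has no integer representation), and a boolean column containing NULLs comes back as float when the backend stores booleans as integers (sqlite) but as object when it has a native boolean type. A minimal sketch of the same promotions with plain pandas objects, no database involved:

# Sketch only: the dtype promotions the assertions above rely on.
import numpy as np
import pandas as pd

print(pd.Series([1, 0, 1]).dtype)            # int64: no NA, stays integer
print(pd.Series([1, 0, np.nan]).dtype)       # float64: ints with NA become float
print(pd.Series([True, False, None]).dtype)  # object: native bools with NA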

# --- Test SQLITE fallback
class TestSQLite(PandasSQLTest):

@@ -660,7 +712,7 @@ def tearDown(self):
self.conn.close()


-class TestMySQLAlchemy(TestSQLAlchemy):
+class TestMySQLAlchemy(_TestSQLAlchemy):
flavor = 'mysql'

def connect(self):
@@ -691,13 +743,39 @@ def tearDown(self):
for table in c.fetchall():
self.conn.execute('DROP TABLE %s' % table[0])

-    def test_default_date_load(self):
-        df = sql.read_table("types_test_data", self.conn)
-
-        # IMPORTANT - sqlite has no native date type, so shouldn't parse,
-        # but MySQL SHOULD be converted.
-        self.assertTrue(
-            issubclass(df.DateCol.dtype.type, np.datetime64), "DateCol loaded with incorrect type")

class TestPostgreSQLAlchemy(_TestSQLAlchemy):
flavor = 'postgresql'

def connect(self):
return sqlalchemy.create_engine(
'postgresql+{driver}://postgres@localhost/pandas_nosetest'.format(driver=self.driver))

def setUp(self):
if not SQLALCHEMY_INSTALLED:
raise nose.SkipTest('SQLAlchemy not installed')

try:
import psycopg2
self.driver = 'psycopg2'

except ImportError:
raise nose.SkipTest

self.conn = self.connect()
self.pandasSQL = sql.PandasSQLAlchemy(self.conn)

self._load_iris_data()
self._load_raw_sql()

self._load_test1_data()

def tearDown(self):
c = self.conn.execute(
"SELECT table_name FROM information_schema.tables"
" WHERE table_schema = 'public'")
for table in c.fetchall():
self.conn.execute("DROP TABLE %s" % table[0])
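With this layout, _TestSQLAlchemy carries every shared test and a backend class only supplies flavor, connect(), setUp() and a tearDown that clears its test tables. A sketch of what a further backend would add, reusing the names defined in this module (sql, sqlalchemy, nose, SQLALCHEMY_INSTALLED); the class name, flavor key and URL below are hypothetical:

# Sketch only: the per-backend surface a new subclass would implement.
# 'TestOtherSQLAlchemy', 'otherdb' and the URL are hypothetical; the flavor
# would also need matching entries in SQL_STRINGS above.
class TestOtherSQLAlchemy(_TestSQLAlchemy):
    flavor = 'otherdb'

    def connect(self):
        return sqlalchemy.create_engine(
            'otherdb+somedriver://user@localhost/pandas_nosetest')

    def setUp(self):
        if not SQLALCHEMY_INSTALLED:
            raise nose.SkipTest('SQLAlchemy not installed')
        self.conn = self.connect()
        self.pandasSQL = sql.PandasSQLAlchemy(self.conn)
        self._load_iris_data()
        self._load_raw_sql()
        self._load_test1_data()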

if __name__ == '__main__':
nose.runmodule(argv=[__file__, '-vvs', '-x', '--pdb', '--pdb-failure'],