diff --git a/datascraper/main_datascraper.py b/datascraper/main_datascraper.py
index eb5f62ca..ff031dac 100644
--- a/datascraper/main_datascraper.py
+++ b/datascraper/main_datascraper.py
@@ -68,7 +68,8 @@ def start_datascraper(json_config, site_name_lower, apis: list = [], webhooks=Tr
             api, identifiers, jobs)
         if not setup:
             api.auth.auth_details.active = False
-        auth_details = api.auth.auth_details.__dict__
+        auth_details = {}
+        auth_details["auth"] = api.auth.auth_details.__dict__
         profile_directory = api.auth.profile_directory
         if profile_directory:
             user_auth_filepath = os.path.join(
diff --git a/helpers/main_helper.py b/helpers/main_helper.py
index 8ed24622..866615b0 100644
--- a/helpers/main_helper.py
+++ b/helpers/main_helper.py
@@ -138,24 +138,27 @@ def import_archive(archive_path) -> Any:
     return metadata
 
 
-def legacy_database_fixer(database_path, database, database_name):
+def legacy_database_fixer(database_path, database, database_name, database_exists):
     database_directory = os.path.dirname(database_path)
     old_database_path = database_path
     old_filename = os.path.basename(old_database_path)
     new_filename = f"Pre_Alembic_{old_filename}"
-    new_database_path = os.path.join(database_directory, new_filename)
-    saved = False
-    if os.path.exists(new_database_path):
-        database_path = new_database_path
-        saved = True
-    Session, engine = db_helper.create_database_session(database_path)
-    database_session = Session()
+    pre_alembic_path = os.path.join(database_directory, new_filename)
+    pre_alembic_database_exists = False
+    if os.path.exists(pre_alembic_path):
+        database_path = pre_alembic_path
+        pre_alembic_database_exists = True
     datas = []
-    result = engine.dialect.has_table(engine, 'alembic_version')
-    if not result:
-        if not saved:
-            os.rename(old_database_path, new_database_path)
-        Session, engine = db_helper.create_database_session(new_database_path)
+    if database_exists:
+        Session, engine = db_helper.create_database_session(database_path)
+        database_session = Session()
+        result = engine.dialect.has_table(engine, 'alembic_version')
+        if not result:
+            if not pre_alembic_database_exists:
+                os.rename(old_database_path, pre_alembic_path)
+                pre_alembic_database_exists = True
+    if pre_alembic_database_exists:
+        Session, engine = db_helper.create_database_session(pre_alembic_path)
         database_session = Session()
         api_table = database.api_table()
         media_table = database.media_table()
@@ -194,12 +197,12 @@ def legacy_database_fixer(database_path, database, database_name):
                 datas.append(new_item)
             print
         database_session.close()
-        x = make_metadata(old_database_path, datas,
-                          database_name, legacy_fixer=True)
+        x = export_sqlite(old_database_path, datas,
+                          database_name, legacy_fixer=True)
         print
 
 
-def make_metadata(archive_path, datas, parent_type, legacy_fixer=False):
+def export_sqlite(archive_path, datas, parent_type, legacy_fixer=False):
     metadata_directory = os.path.dirname(archive_path)
     os.makedirs(metadata_directory, exist_ok=True)
     cwd = os.getcwd()
@@ -211,9 +214,13 @@ def make_metadata(archive_path, datas, parent_type, legacy_fixer=False):
     database = db_collection.chooser(database_name)
     alembic_location = os.path.join(
         cwd, "database", "databases", database_name)
-    exists = os.path.exists(database_path)
-    if not legacy_fixer and exists:
-        x = legacy_database_fixer(database_path, database, database_name)
+    database_exists = os.path.exists(database_path)
+    if database_exists:
+        if os.path.getsize(database_path) == 0:
+            os.remove(database_path)
+            database_exists = False
+    if not legacy_fixer:
+        x = legacy_database_fixer(database_path, database, database_name, database_exists)
     db_helper.run_migrations(alembic_location, database_path)
     print
     Session, engine = db_helper.create_database_session(database_path)
diff --git a/modules/onlyfans.py b/modules/onlyfans.py
index 0bc3dac8..ff6c562f 100644
--- a/modules/onlyfans.py
+++ b/modules/onlyfans.py
@@ -90,7 +90,7 @@ def account_setup(api: start, identifiers: list = [], jobs: dict = {}):
         if "auth" in imported:
             imported = imported["auth"]
         mass_messages = api.get_mass_messages(resume=imported)
-        main_helper.export_data(mass_messages,metadata_filepath)
+        main_helper.export_data(mass_messages, metadata_filepath)
         # chats = api.get_chats()
     if identifiers or jobs["scrape_names"]:
         subscriptions += manage_subscriptions(
@@ -520,12 +520,12 @@ def process_legacy_metadata(api: start, new_metadata_set, formatted_directories,
     archive_path = archive_path.replace("db", "json")
     legacy_metadata_object, delete_legacy_metadatas = legacy_metadata_fixer(
         formatted_directories, api)
-    if legacy_metadata_object:
+    if delete_legacy_metadatas:
         print("Merging new metadata with legacy metadata.")
     old_metadata_set = import_archive(archive_path)
     old_metadata_object = create_metadata(
         api, old_metadata_set, api_type=api_type)
-    if old_metadata_object:
+    if old_metadata_set:
         print("Merging new metadata with old metadata.")
         old_metadata_object = compare_metadata(
             old_metadata_object, legacy_metadata_object)
@@ -554,7 +554,7 @@
 
 
 def process_metadata(archive_path, new_metadata_object, site_name, parent_type, api_path, subscription, delete_metadatas):
-    Session, api_type, folder = main_helper.make_metadata(
+    Session, api_type, folder = main_helper.export_sqlite(
         archive_path, new_metadata_object, parent_type)
     if not subscription.download_info:
         subscription.download_info["metadata_locations"] = {}