diff --git a/README.md b/README.md
index 5fcdde3c..e301e87a 100644
--- a/README.md
+++ b/README.md
@@ -24,6 +24,15 @@ More over, using **Import tables** feature from any supported database is provid
 Hope this sparked some interest! Thank you.
+
+## Installation and Launch
+```sh
+pip install jam.py-v7
+jam-project.py   # creates a new project in the current directory
+server.py        # starts the Jam.py web server
+```
+
+
 [![alt text](https://github.com/jam-py-v5/jam-py/blob/develop/assets/images/JAMPY_Readme.gif?raw=true)](https://northwind.pythonanywhere.com)
@@ -33,7 +42,7 @@ Some short videos about how to setup Jam.py and create applications:
 * [Setting up interface of Jam.py application using Forms Dialogs](https://youtu.be/hvNZ0-a_HHw)
-Longer
+A longer
 [video](https://youtu.be/qkJvGlgoabU) with dashboards and complex internal logic.
 Live demos on PythonAnywhere:
diff --git a/utils/01 univ smart sync for Neo4j - v3.1/example.db b/utils/01 univ smart sync for Neo4j - v3.1/example.db
new file mode 100644
index 00000000..35c77458
Binary files /dev/null and b/utils/01 univ smart sync for Neo4j - v3.1/example.db differ
diff --git a/utils/01 univ smart sync for Neo4j - v3.1/sync_log.json b/utils/01 univ smart sync for Neo4j - v3.1/sync_log.json
new file mode 100644
index 00000000..3c50cced
--- /dev/null
+++ b/utils/01 univ smart sync for Neo4j - v3.1/sync_log.json
@@ -0,0 +1,5 @@
+{
+  "person": "2025-10-19 20:24:55",
+  "project": "2025-10-19 20:24:55",
+  "person_project": "2025-10-19 20:24:55"
+}
\ No newline at end of file
diff --git a/utils/01 univ smart sync for Neo4j - v3.1/uni_smart_sync_v3.1_for_neo4j.py b/utils/01 univ smart sync for Neo4j - v3.1/uni_smart_sync_v3.1_for_neo4j.py
new file mode 100644
index 00000000..9e2e2644
--- /dev/null
+++ b/utils/01 univ smart sync for Neo4j - v3.1/uni_smart_sync_v3.1_for_neo4j.py
@@ -0,0 +1,349 @@
+import sqlite3
+from neo4j import GraphDatabase
+import pandas as pd
+import hashlib, json, requests, sys
+from collections import defaultdict
+from datetime import datetime
+from pathlib import Path
+
+# =========================
+# CONFIG
+# =========================
+NEO4J_URI = "bolt://localhost:7687"
+NEO4J_AUTH = ("neo4j", "rrad0812")
+
+SYNC_LOG_FILE = Path("importer/sync_log.json")
+
+# True = simulation (does NOT write to Neo4j / the log); False = writes changes
+DRY_RUN = False
+
+# Colored log output (on/off)
+USE_COLOR = True
+
+
+# =========================
+# UTIL: colors & printing
+# =========================
+def c(code, s):
+    if not USE_COLOR:
+        return s
+    return f"\033[{code}m{s}\033[0m"
+
+def info(msg): print(c("36", f"[i] {msg}"))
+def ok(msg): print(c("32", f"[✓] {msg}"))
+def warn(msg): print(c("33", f"[!] 
{msg}")) +def err(msg): print(c("31", f"[x] {msg}")) + + +# ========================= +# DATA SOURCE ADAPTERS +# ========================= +def load_sqlite_dataset(db_path): + conn = sqlite3.connect(db_path) + cur = conn.cursor() + cur.execute("SELECT name FROM sqlite_master WHERE type='table'") + tables = [r[0] for r in cur.fetchall()] + dataset = {} + for t in tables: + df = pd.read_sql_query(f"SELECT * FROM {t}", conn) + dataset[t] = df.to_dict(orient="records") + ok(f"SQLite učitan ({db_path}) sa {len(dataset)} tabela.") + return dataset + +def load_csv_dataset(csv_dir): + dataset = {} + for f in Path(csv_dir).glob("*.csv"): + table = f.stem + df = pd.read_csv(f) + dataset[table] = df.to_dict(orient="records") + ok(f"CSV folder '{csv_dir}' učitan sa {len(dataset)} tabela.") + return dataset + +def load_json_dataset(json_path): + data = json.loads(Path(json_path).read_text(encoding="utf-8")) + ok(f"JSON '{json_path}' učitan sa {len(data)} tabela.") + return data + +def load_api_dataset(api_url): + r = requests.get(api_url, timeout=60) + r.raise_for_status() + data = r.json() + ok(f"API '{api_url}' učitan sa {len(data)} tabela.") + return data + + +# ========================= +# SYNC UTILS +# ========================= +def make_hash(row): + # stabilan hash nad svim ključevima (None -> "") + row_str = "|".join(f"{k}:{'' if row[k] is None else row[k]}" for k in sorted(row)) + return hashlib.md5(row_str.encode()).hexdigest() + +def make_rel_hash(a_id, b_id, props_dict): + # hash za relaciju (krajevi + props) – koristi se samo za DRY dif log + base = f"a:{a_id}|b:{b_id}|" + "|".join(f"{k}:{props_dict.get(k)}" for k in sorted(props_dict)) + return hashlib.md5(base.encode()).hexdigest() + +def load_sync_log(): + if SYNC_LOG_FILE.exists(): + try: + return json.loads(SYNC_LOG_FILE.read_text(encoding="utf-8")) + except json.JSONDecodeError: + return {} + return {} + +def save_sync_log(data): + SYNC_LOG_FILE.write_text(json.dumps(data, indent=2, ensure_ascii=False), encoding="utf-8") + +def log_sync_time(sync_data, table): + sync_data[table] = datetime.now().strftime("%Y-%m-%d %H:%M:%S") + +def guess_relation_name(from_col, to_table): + base = str(from_col).lower() + to = str(to_table).lower() + if base.endswith("_id"): + base = base[:-3] + return f"{base.upper()}_REL_{to.upper()}" + +def title_label(name: str) -> str: + # Label iz imena tabele: account_user -> Account_user (jednostavno) + if not name: + return "X" + return name[0].upper() + name[1:] + + +# ========================================================= +# HEURISTIČKO PREPOZNAVANJE FK I JOIN TABELA (CSV/JSON/API) +# ========================================================= +def detect_fk_columns(table, rows): + """Vraća listu kolona koje izgledaju kao FK: *_id.""" + if not rows: + return [] + # sve kolone zajedno iz svih redova (za slučaj nested/različite ključeve) + keys = set().union(*(r.keys() for r in rows)) + return [k for k in keys if k.endswith("_id")] + +def is_join_table_heuristic(table, rows, dataset): + """ + Join tabela ako: + - ima TAČNO 2 *_id kolone + - i obe bazne tabele postoje u datasetu + """ + fks = detect_fk_columns(table, rows) + if len(fks) != 2: + return False, [] + base1 = fks[0][:-3] + base2 = fks[1][:-3] + if base1 in dataset and base2 in dataset: + return True, fks + return False, fks + +def extra_rel_props(table, rows, fk_cols): + """Kolone koje nisu FK – postaju properties relacije (ako postoje).""" + if not rows: + return [] + keys = set().union(*(r.keys() for r in rows)) + return [k for k in keys if k not 
in fk_cols] + + +# ========================= +# SMART SYNC ENGINE +# ========================= +def smart_sync(dataset, driver): + with driver.session() as session: + stats_nodes = defaultdict(lambda: {"new": 0, "updated": 0, "deleted": 0}) + stats_rels = defaultdict(int) + sync_data = load_sync_log() + all_existing_nodes = set() + + tables = list(dataset.keys()) + info(f"Pronađeno tabela: {len(tables)}") + + # 0) pre-calc join tabele da ih preskočimo kao čvorove + join_tables = {} + for t in tables: + is_join, fks = is_join_table_heuristic(t, dataset[t], dataset) + if is_join: + join_tables[t] = fks + if join_tables: + info("Join tabele (heuristika): " + ", ".join(join_tables.keys())) + + # 1) NODES (INSERT/UPDATE) – skip join tables + print(c("1;44", "\n=== NODE SYNC ===")) + for table in tables: + # skip čiste join tabele (ne pravimo čvorove) + if table in join_tables: + info(f"Preskačem čvorove za join tabelu '{table}'") + log_sync_time(sync_data, table) + continue + + rows = dataset[table] + for row in rows: + if "id" not in row: + warn(f"{table}: red bez 'id' – preskačem: {row}") + continue + + row_hash = make_hash(row) + row["_hash"] = row_hash + all_existing_nodes.add((table, row["id"])) + + res = session.run( + f"MATCH (n:{title_label(table)} {{id: $id}}) RETURN n._hash AS h", + id=row["id"] + ).single() + + if res and res["h"] == row_hash: + # nema promene + continue + + if res: + stats_nodes[table]["updated"] += 1 + print(c("36", f"& UPDATE → {table} id={row['id']}")) + else: + stats_nodes[table]["new"] += 1 + print(c("32", f"+ INSERT → {table} id={row['id']}")) + + if not DRY_RUN: + props = ", ".join(f"n.{k} = ${k}" for k in row.keys()) + cypher = f""" + MERGE (n:{title_label(table)} {{id: $id}}) + ON CREATE SET {props}, n.tip = '{table}' + ON MATCH SET {props} + """ + session.run(cypher, **row) + + log_sync_time(sync_data, table) + + # 2) RELATIONSHIPS + print(c("1;44", "\n=== RELATION SYNC ===")) + for table in tables: + rows = dataset[table] + + # (A) JOIN TABELA ⇒ direktne relacije (sa props ako ima extra kolona) + if table in join_tables: + fk1, fk2 = join_tables[table] + base1, base2 = fk1[:-3], fk2[:-3] + rel_name = f"{base1.upper()}_{base2.upper()}_REL" + props_cols = extra_rel_props(table, rows, [fk1, fk2]) + + info(f"{table}: join → {base1} ⇢ {base2} [{rel_name}] (props={bool(props_cols)})") + + for row in rows: + a_id = row.get(fk1) + b_id = row.get(fk2) + if a_id is None or b_id is None: + continue + + params = {"a_id": a_id, "b_id": b_id} + props_set = "" + if props_cols: + for p in props_cols: + params[p] = row.get(p, None) + props_set = " {" + ", ".join(f"{p}: ${p}" for p in props_cols) + "}" + + stats_rels[rel_name] += 1 + print(c("35", f"MERGE rel → {base1}({a_id}) -[{rel_name}{' props' if props_cols else ''}]-> {base2}({b_id})")) + + if not DRY_RUN: + cypher = f""" + MATCH (a:{title_label(base1)} {{id: $a_id}}), + (b:{title_label(base2)} {{id: $b_id}}) + MERGE (a)-[r:{rel_name}]->(b) + {"SET " + ", ".join(f"r.{p} = ${p}" for p in props_cols) if props_cols else ""} + """ + session.run(cypher, **params) + + continue # idući table + + # (B) Regularne FK relacije – svaka *_id kolona vezuje na {base}.id ako takva tabela postoji + fk_cols = detect_fk_columns(table, rows) + for fk in fk_cols: + ref = fk[:-3] + if ref not in dataset: + continue # nema ciljane tabele u datasetu → preskoči + + rel_name = guess_relation_name(fk, ref) + info(f"{table}.{fk} -> {ref}.id [{rel_name}]") + + # nema “props” – ovo su obične 1:N veze + if not DRY_RUN: + cypher = f""" + MATCH 
(a:{title_label(table)}), (b:{title_label(ref)}) + WHERE a.{fk} = b.id + MERGE (a)-[:{rel_name}]->(b) + """ + session.run(cypher) + # statistika (približno – bez brojanja po redovima radi brzine) + stats_rels[rel_name] += len(rows) if rows else 0 + + # 3) DELETE “duhova” (čvorovi koji više ne postoje u izvoru) + print(c("1;44", "\n=== CLEANUP ===")) + for table in tables: + if table in join_tables: + continue # join tabele ne generišu čvorove + + ids_src = [i for (t, i) in all_existing_nodes if t == table] + res = session.run( + f"MATCH (n:{title_label(table)}) RETURN collect(n.id) AS ids" + ).single() + ids_neo = res["ids"] if res and res["ids"] else [] + to_delete = [i for i in ids_neo if i not in ids_src] + + if to_delete: + stats_nodes[table]["deleted"] += len(to_delete) + for i in to_delete: + print(c("31", f"× DELETE → {table} id={i}")) + if not DRY_RUN: + session.run( + f"MATCH (n:{title_label(table)}) WHERE n.id IN $ids DETACH DELETE n", + ids=to_delete, + ) + + # 4) META TAGS & LOG + if not DRY_RUN: + # meta: broj veza po čvoru + session.run(""" + MATCH (n) + OPTIONAL MATCH (n)-[r]-() + WITH n, count(r) AS broj_veza + SET n.count_of_rel = broj_veza + """) + save_sync_log(sync_data) + ok("Sinhronizacija upisana i meta tagovi ažurirani.") + else: + warn("DRY-RUN je uključen: Neo4j i sync log NISU menjani.") + + # 5) REZIME + print(c("1;44", "\n=== REZIME ===")) + total_new = total_upd = total_del = 0 + for t, s in stats_nodes.items(): + n, u, d = s["new"], s["updated"], s["deleted"] + if n or u or d: + print(f"{title_label(t):18} → + {n:3} & {u:3} × {d:3}") + total_new += n; total_upd += u; total_del += d + print(f"\nČVOROVI → + {total_new} & {total_upd} × {total_del}") + + if stats_rels: + print("\nVEZE:") + for rname, cnt in stats_rels.items(): + print(f" {rname:28} ~ {cnt}") + print("\n✅ Universal Smart Diff Sync v3.1 završeno.\n") + + +# ========================= +# MAIN +# ========================= +if __name__ == "__main__": + driver = GraphDatabase.driver(NEO4J_URI, auth=NEO4J_AUTH) + + # === IZABERI IZVOR (otkomentariši jednu liniju) === + # dataset = load_sqlite_dataset("importer/example.db") + # dataset = load_csv_dataset("data/") # npr. data/person.csv, data/project.csv, data/person_project.csv + # dataset = load_json_dataset("data.json") # JSON: { "person":[...], "project":[...], "person_project":[...] } + # dataset = load_api_dataset("https://example.com/api/export") + + # Primer: SQLite + dataset = load_sqlite_dataset("importer/example.db") + + smart_sync(dataset, driver) \ No newline at end of file diff --git a/utils/01 univ smart sync for Neo4j - v3.1/uputstvo/uni_smart_sync_v3.1_for_Neo4j.md b/utils/01 univ smart sync for Neo4j - v3.1/uputstvo/uni_smart_sync_v3.1_for_Neo4j.md new file mode 100644 index 00000000..ee367026 --- /dev/null +++ b/utils/01 univ smart sync for Neo4j - v3.1/uputstvo/uni_smart_sync_v3.1_for_Neo4j.md @@ -0,0 +1,392 @@ + +# Universal Smart Sync v3.1 for Neo4j + +Uključuje: + +- SQLite / CSV / JSON / API to Neo4j +- heurističko prepoznavanje join tabela, +- relacije sa property kolonama, +- hash diff-sync, +- DRY-RUN i +- čišćenje čvorova - duhova. 
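+
+The join-table heuristic and the relationship-property handling are easiest to see on the in-memory shape that every loader (SQLite, CSV, JSON, API) produces: a dict of `table name -> list of row dicts`. The snippet below is only an illustrative sketch: the column values and the extra `role` column are made up, while the table names mirror the bundled `example.db` / `sync_log.json` (person, project, person_project):
+
+```python
+# Hypothetical, hand-written dataset in the shape the loaders return.
+dataset = {
+    "person":  [{"id": 1, "name": "Ana"}],
+    "project": [{"id": 10, "name": "Demo"}],
+    # Exactly two *_id columns whose base tables exist in the dataset,
+    # so the heuristic treats this as a join table: no Person_project nodes
+    # are created, and the extra "role" column becomes a relationship property.
+    "person_project": [{"person_id": 1, "project_id": 10, "role": "lead"}],
+}
+```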
+ +> [!Note] +> Instalacija (jednom): +> +> ```sh +> pip install neo4j pandas requests +> ``` +> +> Pokretanje: +> +> - Sačuvaj skriptu kao `universal_smart_sync_v3_1.py` +> - U dnu fajla izaberi izvor (otkomentariši jednu liniju) +> +> ```sh +> `python universal_smart_sync_v3_1.py` +> ``` + +## `universal_smart_sync_v3_1.py` + +```python +import sqlite3 +from neo4j import GraphDatabase +import pandas as pd +import hashlib, json, requests, sys +from collections import defaultdict +from datetime import datetime +from pathlib import Path + +# ========================= +# CONFIG +# ========================= +NEO4J_URI = "bolt://localhost:7687" +NEO4J_AUTH = ("neo4j", "test") + +SYNC_LOG_FILE = Path("sync_log.json") + +# True = simulacija (NE PIŠE u Neo4j / log); False = piše promene +DRY_RUN = True + +# Boje za lepši log (on/off) +USE_COLOR = True + + +# ========================= +# UTIL: boje & ispisi +# ========================= +def c(code, s): + if not USE_COLOR: + return s + return f"\033[{code}m{s}\033[0m" + +def info(msg): print(c("36", f"[i] {msg}")) +def ok(msg): print(c("32", f"[✓] {msg}")) +def warn(msg): print(c("33", f"[!] {msg}")) +def err(msg): print(c("31", f"[x] {msg}")) + + +# ========================= +# DATA SOURCE ADAPTERS +# ========================= +def load_sqlite_dataset(db_path): + conn = sqlite3.connect(db_path) + cur = conn.cursor() + cur.execute("SELECT name FROM sqlite_master WHERE type='table'") + tables = [r[0] for r in cur.fetchall()] + dataset = {} + for t in tables: + df = pd.read_sql_query(f"SELECT * FROM {t}", conn) + dataset[t] = df.to_dict(orient="records") + ok(f"SQLite učitan ({db_path}) sa {len(dataset)} tabela.") + return dataset + +def load_csv_dataset(csv_dir): + dataset = {} + for f in Path(csv_dir).glob("*.csv"): + table = f.stem + df = pd.read_csv(f) + dataset[table] = df.to_dict(orient="records") + ok(f"CSV folder '{csv_dir}' učitan sa {len(dataset)} tabela.") + return dataset + +def load_json_dataset(json_path): + data = json.loads(Path(json_path).read_text(encoding="utf-8")) + ok(f"JSON '{json_path}' učitan sa {len(data)} tabela.") + return data + +def load_api_dataset(api_url): + r = requests.get(api_url, timeout=60) + r.raise_for_status() + data = r.json() + ok(f"API '{api_url}' učitan sa {len(data)} tabela.") + return data + + +# ========================= +# SYNC UTILS +# ========================= +def make_hash(row): + # stabilan hash nad svim ključevima (None -> "") + row_str = "|".join(f"{k}:{'' if row[k] is None else row[k]}" for k in sorted(row)) + return hashlib.md5(row_str.encode()).hexdigest() + +def make_rel_hash(a_id, b_id, props_dict): + # hash za relaciju (krajevi + props) – koristi se samo za DRY dif log + base = f"a:{a_id}|b:{b_id}|" + "|".join(f"{k}:{props_dict.get(k)}" for k in sorted(props_dict)) + return hashlib.md5(base.encode()).hexdigest() + +def load_sync_log(): + if SYNC_LOG_FILE.exists(): + try: + return json.loads(SYNC_LOG_FILE.read_text(encoding="utf-8")) + except json.JSONDecodeError: + return {} + return {} + +def save_sync_log(data): + SYNC_LOG_FILE.write_text(json.dumps(data, indent=2, ensure_ascii=False), encoding="utf-8") + +def log_sync_time(sync_data, table): + sync_data[table] = datetime.now().strftime("%Y-%m-%d %H:%M:%S") + +def guess_relation_name(from_col, to_table): + base = str(from_col).lower() + to = str(to_table).lower() + if base.endswith("_id"): + base = base[:-3] + return f"{base.upper()}_REL_{to.upper()}" + +def title_label(name: str) -> str: + # Label iz imena tabele: account_user -> 
Account_user (jednostavno) + if not name: + return "X" + return name[0].upper() + name[1:] + + +# ========================================================= +# HEURISTIČKO PREPOZNAVANJE FK I JOIN TABELA (CSV/JSON/API) +# ========================================================= +def detect_fk_columns(table, rows): + """Vraća listu kolona koje izgledaju kao FK: *_id.""" + if not rows: + return [] + # sve kolone zajedno iz svih redova (za slučaj nested/različite ključeve) + keys = set().union(*(r.keys() for r in rows)) + return [k for k in keys if k.endswith("_id")] + +def is_join_table_heuristic(table, rows, dataset): + """ + Join tabela ako: + - ima TAČNO 2 *_id kolone + - i obe bazne tabele postoje u datasetu + """ + fks = detect_fk_columns(table, rows) + if len(fks) != 2: + return False, [] + base1 = fks[0][:-3] + base2 = fks[1][:-3] + if base1 in dataset and base2 in dataset: + return True, fks + return False, fks + +def extra_rel_props(table, rows, fk_cols): + """Kolone koje nisu FK – postaju properties relacije (ako postoje).""" + if not rows: + return [] + keys = set().union(*(r.keys() for r in rows)) + return [k for k in keys if k not in fk_cols] + + +# ========================= +# SMART SYNC ENGINE +# ========================= +def smart_sync(dataset, driver): + with driver.session() as session: + stats_nodes = defaultdict(lambda: {"new": 0, "updated": 0, "deleted": 0}) + stats_rels = defaultdict(int) + sync_data = load_sync_log() + all_existing_nodes = set() + + tables = list(dataset.keys()) + info(f"Pronađeno tabela: {len(tables)}") + + # 0) pre-calc join tabele da ih preskočimo kao čvorove + join_tables = {} + for t in tables: + is_join, fks = is_join_table_heuristic(t, dataset[t], dataset) + if is_join: + join_tables[t] = fks + if join_tables: + info("Join tabele (heuristika): " + ", ".join(join_tables.keys())) + + # 1) NODES (INSERT/UPDATE) – skip join tables + print(c("1;44", "\n=== NODE SYNC ===")) + for table in tables: + # skip čiste join tabele (ne pravimo čvorove) + if table in join_tables: + info(f"Preskačem čvorove za join tabelu '{table}'") + log_sync_time(sync_data, table) + continue + + rows = dataset[table] + for row in rows: + if "id" not in row: + warn(f"{table}: red bez 'id' – preskačem: {row}") + continue + + row_hash = make_hash(row) + row["_hash"] = row_hash + all_existing_nodes.add((table, row["id"])) + + res = session.run( + f"MATCH (n:{title_label(table)} {{id: $id}}) RETURN n._hash AS h", + id=row["id"] + ).single() + + if res and res["h"] == row_hash: + # nema promene + continue + + if res: + stats_nodes[table]["updated"] += 1 + print(c("36", f"& UPDATE → {table} id={row['id']}")) + else: + stats_nodes[table]["new"] += 1 + print(c("32", f"+ INSERT → {table} id={row['id']}")) + + if not DRY_RUN: + props = ", ".join(f"n.{k} = ${k}" for k in row.keys()) + cypher = f""" + MERGE (n:{title_label(table)} {{id: $id}}) + ON CREATE SET {props}, n.tip = '{table}' + ON MATCH SET {props} + """ + session.run(cypher, **row) + + log_sync_time(sync_data, table) + + # 2) RELATIONSHIPS + print(c("1;44", "\n=== RELATION SYNC ===")) + for table in tables: + rows = dataset[table] + + # (A) JOIN TABELA ⇒ direktne relacije (sa props ako ima extra kolona) + if table in join_tables: + fk1, fk2 = join_tables[table] + base1, base2 = fk1[:-3], fk2[:-3] + rel_name = f"{base1.upper()}_{base2.upper()}_REL" + props_cols = extra_rel_props(table, rows, [fk1, fk2]) + + info(f"{table}: join → {base1} ⇢ {base2} [{rel_name}] (props={bool(props_cols)})") + + for row in rows: + a_id = 
row.get(fk1) + b_id = row.get(fk2) + if a_id is None or b_id is None: + continue + + params = {"a_id": a_id, "b_id": b_id} + props_set = "" + if props_cols: + for p in props_cols: + params[p] = row.get(p, None) + props_set = " {" + ", ".join(f"{p}: ${p}" for p in props_cols) + "}" + + stats_rels[rel_name] += 1 + print(c("35", f"MERGE rel → {base1}({a_id}) -[{rel_name}{' props' if props_cols else ''}]-> {base2}({b_id})")) + + if not DRY_RUN: + cypher = f""" + MATCH (a:{title_label(base1)} {{id: $a_id}}), + (b:{title_label(base2)} {{id: $b_id}}) + MERGE (a)-[r:{rel_name}]->(b) + {"SET " + ", ".join(f"r.{p} = ${p}" for p in props_cols) if props_cols else ""} + """ + session.run(cypher, **params) + + continue # idući table + + # (B) Regularne FK relacije – svaka *_id kolona vezuje na {base}.id ako takva tabela postoji + fk_cols = detect_fk_columns(table, rows) + for fk in fk_cols: + ref = fk[:-3] + if ref not in dataset: + continue # nema ciljane tabele u datasetu → preskoči + + rel_name = guess_relation_name(fk, ref) + info(f"{table}.{fk} -> {ref}.id [{rel_name}]") + + # nema “props” – ovo su obične 1:N veze + if not DRY_RUN: + cypher = f""" + MATCH (a:{title_label(table)}), (b:{title_label(ref)}) + WHERE a.{fk} = b.id + MERGE (a)-[:{rel_name}]->(b) + """ + session.run(cypher) + # statistika (približno – bez brojanja po redovima radi brzine) + stats_rels[rel_name] += len(rows) if rows else 0 + + # 3) DELETE “duhova” (čvorovi koji više ne postoje u izvoru) + print(c("1;44", "\n=== CLEANUP ===")) + for table in tables: + if table in join_tables: + continue # join tabele ne generišu čvorove + + ids_src = [i for (t, i) in all_existing_nodes if t == table] + res = session.run( + f"MATCH (n:{title_label(table)}) RETURN collect(n.id) AS ids" + ).single() + ids_neo = res["ids"] if res and res["ids"] else [] + to_delete = [i for i in ids_neo if i not in ids_src] + + if to_delete: + stats_nodes[table]["deleted"] += len(to_delete) + for i in to_delete: + print(c("31", f"× DELETE → {table} id={i}")) + if not DRY_RUN: + session.run( + f"MATCH (n:{title_label(table)}) WHERE n.id IN $ids DETACH DELETE n", + ids=to_delete, + ) + + # 4) META TAGS & LOG + if not DRY_RUN: + # meta: broj veza po čvoru + session.run(""" + MATCH (n) + OPTIONAL MATCH (n)-[r]-() + WITH n, count(r) AS broj_veza + SET n.count_of_rel = broj_veza + """) + save_sync_log(sync_data) + ok("Sinhronizacija upisana i meta tagovi ažurirani.") + else: + warn("DRY-RUN je uključen: Neo4j i sync log NISU menjani.") + + # 5) REZIME + print(c("1;44", "\n=== REZIME ===")) + total_new = total_upd = total_del = 0 + for t, s in stats_nodes.items(): + n, u, d = s["new"], s["updated"], s["deleted"] + if n or u or d: + print(f"{title_label(t):18} → + {n:3} & {u:3} × {d:3}") + total_new += n; total_upd += u; total_del += d + print(f"\nČVOROVI → + {total_new} & {total_upd} × {total_del}") + + if stats_rels: + print("\nVEZE:") + for rname, cnt in stats_rels.items(): + print(f" {rname:28} ~ {cnt}") + print("\n✅ Universal Smart Diff Sync v3.1 završeno.\n") + + +# ========================= +# MAIN +# ========================= +if __name__ == "__main__": + driver = GraphDatabase.driver(NEO4J_URI, auth=NEO4J_AUTH) + + # === IZABERI IZVOR (otkomentariši jednu liniju) === + # dataset = load_sqlite_dataset("primer.db") + # dataset = load_csv_dataset("data/") # npr. data/person.csv, data/project.csv, data/person_project.csv + # dataset = load_json_dataset("data.json") # JSON: { "person":[...], "project":[...], "person_project":[...] 
} + # dataset = load_api_dataset("https://example.com/api/export") + + # Primer: SQLite + dataset = load_sqlite_dataset("primer.db") + + smart_sync(dataset, driver) +``` + +## Kako radi heuristika za CSV/JSON/API + +- **Join tabela**: ako tabela ima tačno dve kolone koje se završavaju + na `_id`** i obe njihove bazne tabele postoje u datasetu → tretira se kao agregacija. + - Ako ima **samo ta dva FK** → pravi se **relacija bez props**. + - Ako ima **još kolona** → te kolone postaju **properties relacije**. + +- **Regularan FK (1:N)**: svaka kolona `*_id` pravi MERGE ka tabeli ` + {base}.id`, ako ta tabela postoji. +- **Čvorovi se ne prave** za join tabele (čiste ili sa props). diff --git a/utils/02 jampy app scaffolding/example.db b/utils/02 jampy app scaffolding/example.db new file mode 100644 index 00000000..35c77458 Binary files /dev/null and b/utils/02 jampy app scaffolding/example.db differ diff --git a/utils/02 jampy app scaffolding/scaffold_common.py b/utils/02 jampy app scaffolding/scaffold_common.py new file mode 100644 index 00000000..14680d19 --- /dev/null +++ b/utils/02 jampy app scaffolding/scaffold_common.py @@ -0,0 +1,134 @@ + +import json +import re +import datetime + +# KONSTANTE – zajedničke za sve drajvere + +# SYS_ITEMS config +ITEM_START_ID = 6 +PARENT_ID = 2 +TASK_ID = 1 +TYPE_ID = 10 +VISIBLE = 1 +DELETED = 0 +TABLE_ID = 0 +F_VIRTUAL_TABLE = 0 +F_SOFT_DELETE = 0 + +# SYS_FIELDS config +FIELD_START_ID = 12 +OWNER_ID = 3 +#F_ALIGNMENT = 1 +F_TEXTAREA = 0 +F_DO_NOT_SANITIZE = 0 +F_CALC_LOOKUP_FIELD = 0 +F_REQUIRED = 0 + +DEFAULT_GROUP = 1 +DEFAULT_OWNER = 3 + +# JAM.PY TYPES +# TEXT, INTEGER, FLOAT, CURRENCY, DATE, DATETIME, BOOLEAN, LONGTEXT, KEYS, FILE, IMAGE = range(1, 12) + +# Tipovi koji se prevode u Jam.py field_type vrednosti +TYPE_MAPPING = { + "VARCHAR": "TEXT", + "CHAR": "TEXT", + "TEXT": "TEXT", + "INTEGER": "INTEGER", + "INT": "INTEGER", + "SMALLINT": "INTEGER", + "BIGINT": "INTEGER", + "REAL": "FLOAT", + "FLOAT": "FLOAT", + "DOUBLE": "FLOAT", + "DECIMAL": "CURRENCY", + "NUMERIC": "CURRENCY", + "DATE": "DATE", + "DATETIME": "DATETIME", + "TIMESTAMP": "DATETIME", + "BOOLEAN": "BOOLEAN", + "BLOB": "LONGTEXT" +} + +# POMOĆNE FUNKCIJE – svi drajveri ih koriste + +def to_camel_case(s: str) -> str: + """Pretvara ime tabele/kolone u CamelCase.""" + return "".join(w.capitalize() for w in re.split(r"[_\W]+", s) if w) + +def to_caption(s: str) -> str: + """Kreira lep naslov iz imena kolone.""" + s = s.replace("_", " ").strip().capitalize() + return re.sub(r"\s+", " ", s) + +def sanitize_field_name(name: str) -> str: + """Uklanja nedozvoljene karaktere iz imena kolone.""" + return re.sub(r"[^\w_]", "", name) + +def get_f_data_type(col_type: str) -> str: + """Vraća Jam.py tip podatka na osnovu SQL tipa.""" + if not col_type: + return 'TEXT' + + base_type = col_type.upper().split('(')[0].strip() + + # grupisani SQL tipovi prema Jam.py podršci + if base_type in ('INT', 'INTEGER', 'SMALLINT', 'BIGINT', 'BOOLEAN'): + return 'INTEGER' + elif base_type in ('REAL', 'FLOAT', 'DOUBLE', 'DECIMAL', 'NUMERIC', 'CURRENCY'): + return 'FLOAT' + elif base_type in ('DATE', 'DATETIME', 'TIMESTAMP'): + return 'TEXT' # Jam.py datume tretira kao TEXT + elif base_type in ('BLOB', 'LONGTEXT', 'TEXT', 'CHAR', 'VARCHAR'): + return 'TEXT' + else: + return 'TEXT' + +def make_field_info(name: str, data_type: str, pk=False, not_null=False) -> str: + """Generiše JSON string za SYS_FIELDS.f_info.""" + info = { + "field_name": name, + "data_type": data_type, + "primary_key": bool(pk), + "not_null": 
bool(not_null), + "created_at": datetime.datetime.now().isoformat() + } + return json.dumps(info, ensure_ascii=False) + +def debug(msg: str): + """Lagan debug print, lako se može zameniti loggerom.""" + print(f"DEBUG: {msg}") + +# POMOĆNE STRUKTURE + +def build_field_record(table_id, name, col_type, pk=False, not_null=False, field_id=None): + """Kreira jedan red za SYS_FIELDS.""" + data_type = get_f_data_type(col_type) + f_info = make_field_info(name, data_type, pk=pk, not_null=not_null) + + return { + "id": field_id or None, + "f_table": table_id, + "f_name": sanitize_field_name(name), + "f_caption": to_caption(name), + "f_data_type": data_type, + "f_not_null": int(not_null), + "f_primary_key": int(pk), + "f_info": f_info, + } + +def build_item_record(name, group=DEFAULT_GROUP, owner=DEFAULT_OWNER, item_id=None): + """Kreira jedan red za SYS_ITEMS (tabelu).""" + return { + "id": item_id or None, + "f_name": name, + "f_caption": to_caption(name), + "f_owner": owner, + "f_group": group, + "f_info": json.dumps({ + "table": name, + "created_at": datetime.datetime.now().isoformat() + }, ensure_ascii=False), + } diff --git a/utils/02 jampy app scaffolding/scaffold_firebird.py b/utils/02 jampy app scaffolding/scaffold_firebird.py new file mode 100644 index 00000000..e39610a8 --- /dev/null +++ b/utils/02 jampy app scaffolding/scaffold_firebird.py @@ -0,0 +1,152 @@ +""" +scaffold_firebird.py +Scaffold implementacija za Firebird bazu (Jam.py kompatibilna). +Koristi zajednički modul scaffold_common. +""" + +import fdb # Firebird SQL driver +from pathlib import Path +import scaffold_common as common + + +# KONEKCIJA +def connect_to_database(db_info: dict): + """Uspostavlja konekciju sa Firebird bazom.""" + try: + conn = fdb.connect( + dsn=f"{db_info.get('host', 'localhost')}/{db_info.get('port', 3050)}:{db_info.get('db')}", + user=db_info.get("user", "sysdba"), + password=db_info.get("password", "masterkey") + ) + common.debug(f"Povezivanje na Firebird bazu: {db_info.get('db')}") + return conn + except Exception as e: + raise ConnectionError(f"Greška pri konekciji na Firebird: {e}") + + + +# STRUKTURA BAZE +def get_table_names(conn): + """Vraća listu tabela iz Firebird baze.""" + cur = conn.cursor() + cur.execute(""" + SELECT RDB$RELATION_NAME + FROM RDB$RELATIONS + WHERE RDB$SYSTEM_FLAG = 0 AND RDB$VIEW_BLR IS NULL; + """) + tables = [r[0].strip() for r in cur.fetchall()] + cur.close() + common.debug(f"Pronađeno tabela: {tables}") + return tables + + +def get_table_info(conn, table_name: str): + """Vraća detalje o kolonama u tabeli.""" + cur = conn.cursor() + cur.execute(""" + SELECT + rf.RDB$FIELD_NAME, + f.RDB$FIELD_TYPE, + f.RDB$FIELD_SUB_TYPE, + COALESCE(f.RDB$FIELD_LENGTH, 0), + COALESCE(f.RDB$FIELD_PRECISION, 0), + COALESCE(f.RDB$FIELD_SCALE, 0), + COALESCE(rf.RDB$NULL_FLAG, 0) + FROM RDB$RELATION_FIELDS rf + JOIN RDB$FIELDS f ON rf.RDB$FIELD_SOURCE = f.RDB$FIELD_NAME + WHERE rf.RDB$RELATION_NAME = ? 
+ ORDER BY rf.RDB$FIELD_POSITION; + """, (table_name,)) + columns = [] + for row in cur.fetchall(): + name = row[0].strip() + col_type = f"TYPE_{row[1]}" + not_null = bool(row[6]) + pk = is_primary_key(conn, table_name, name) + columns.append({ + "col_name": name, + "col_type": col_type, + "col_constraints": "", + "pk": pk, + "not_null": not_null + }) + cur.close() + return columns + + +def is_primary_key(conn, table_name: str, col_name: str) -> bool: + """Proverava da li je kolona primarni ključ u Firebird tabeli.""" + cur = conn.cursor() + cur.execute(""" + SELECT sg.RDB$FIELD_NAME + FROM RDB$INDEX_SEGMENTS sg + JOIN RDB$INDICES i ON i.RDB$INDEX_NAME = sg.RDB$INDEX_NAME + WHERE i.RDB$RELATION_NAME = ? AND i.RDB$UNIQUE_FLAG = 1; + """, (table_name,)) + pk_cols = [r[0].strip() for r in cur.fetchall()] + cur.close() + return col_name in pk_cols + + + +# UPIS U admin.sqlite +def write_to_admin(conn_src, admin_path: Path, db_info: dict): + """Upisuje tabele i kolone iz Firebird baze u Jam.py admin.sqlite.""" + import sqlite3 + if not admin_path.exists(): + raise FileNotFoundError(f"admin.sqlite nije pronađen na {admin_path}") + + conn_admin = sqlite3.connect(admin_path) + cur_admin = conn_admin.cursor() + + cur_admin.execute("DELETE FROM SYS_ITEMS;") + cur_admin.execute("DELETE FROM SYS_FIELDS;") + + tables = get_table_names(conn_src) + item_id = common.ITEM_START_ID + field_id = common.FIELD_START_ID + + for tname in tables: + columns = get_table_info(conn_src, tname) + + item_rec = common.build_item_record( + name=tname, + item_id=item_id, + group=db_info.get("group", common.DEFAULT_GROUP), + owner=db_info.get("owner", common.DEFAULT_OWNER) + ) + cur_admin.execute(""" + INSERT INTO SYS_ITEMS (id, f_name, f_caption, f_group, f_owner, f_info) + VALUES (:id, :f_name, :f_caption, :f_group, :f_owner, :f_info) + """, item_rec) + + for col in columns: + field_rec = common.build_field_record( + table_id=item_id, + name=col["col_name"], + col_type=col["col_type"], + pk=col["pk"], + not_null=col["not_null"], + field_id=field_id + ) + cur_admin.execute(""" + INSERT INTO SYS_FIELDS + (id, f_table, f_name, f_caption, f_data_type, f_not_null, f_primary_key, f_info) + VALUES (:id, :f_table, :f_name, :f_caption, :f_data_type, :f_not_null, :f_primary_key, :f_info) + """, field_rec) + field_id += 1 + + item_id += 1 + + conn_admin.commit() + conn_admin.close() + common.debug(f"Zapisano {len(tables)} tabela u admin.sqlite ({admin_path})") + + +# GLAVNA ENTRY TAČKA +def my_database_procedure(conn, db_info: dict): + """Glavna procedura – povezuje sve korake.""" + admin_path = Path(db_info.get("admin_path", "admin.sqlite")) + common.debug(f"Pokrećem scaffold za Firebird → {admin_path}") + write_to_admin(conn, admin_path, db_info) + common.debug("Scaffold Firebird završio uspešno.") diff --git a/utils/02 jampy app scaffolding/scaffold_loader.py b/utils/02 jampy app scaffolding/scaffold_loader.py new file mode 100644 index 00000000..0f1df038 --- /dev/null +++ b/utils/02 jampy app scaffolding/scaffold_loader.py @@ -0,0 +1,82 @@ +""" +scaffold_loader.py +Univerzalni loader za Jam.py scaffold sisteme. + +Automatski prepoznaje drajver (sqlite, mysql, postgres, firebird) +i poziva odgovarajući scaffold_{driver}.py modul. 
+""" + +import importlib +import sys +import scaffold_common as common + + +# ============================================================ +# ⚙️ GLAVNA FUNKCIJA +# ============================================================ +def run_scaffold(db_info: dict): + """ + Pokreće odgovarajući scaffold modul na osnovu drajvera. + + db_info primer: + { + "driver": "mysql", + "db": "primer_db", + "host": "localhost", + "user": "root", + "password": "pass", + "admin_path": "admin.sqlite", + "group": 1, + "owner": 1 + } + """ + + driver = db_info.get("driver", "sqlite").lower().strip() + module_name = f"scaffold_{driver}" + + common.debug(f"🔍 Detektovan drajver: {driver}") + + try: + scaffold_module = importlib.import_module(module_name) + except ModuleNotFoundError: + raise ImportError(f"❌ Modul {module_name}.py nije pronađen!") + + if not hasattr(scaffold_module, "connect_to_database") or not hasattr(scaffold_module, "my_database_procedure"): + raise AttributeError(f"❌ Modul {module_name} ne implementira potrebne funkcije!") + + common.debug(f"⚙️ Pokrećem scaffold iz {module_name}.py") + + # 1️⃣ Konekcija + conn = scaffold_module.connect_to_database(db_info) + if not conn: + raise RuntimeError(f"❌ Nije moguće uspostaviti konekciju ({driver})") + + # 2️⃣ Scaffold proces + scaffold_module.my_database_procedure(conn, db_info) + + # 3️⃣ Zatvaranje konekcije + try: + conn.close() + except Exception: + pass + + common.debug(f"✅ Scaffold kompletiran za drajver: {driver}") + + +# ============================================================ +# 🚀 CLI ENTRY TAČKA +# ============================================================ +if __name__ == "__main__": + # primer ručnog pokretanja + db_info = { + "driver": "sqlite", # ili mysql / postgres / firebird + "db": "primer.db", + "admin_path": "admin.sqlite", + "group": 1, + "owner": 1, + "user": "root", + "password": "test", + "host": "localhost", + "port": 5432 + } + run_scaffold(db_info) diff --git a/utils/02 jampy app scaffolding/scaffold_mysql.py b/utils/02 jampy app scaffolding/scaffold_mysql.py new file mode 100644 index 00000000..ebfb0122 --- /dev/null +++ b/utils/02 jampy app scaffolding/scaffold_mysql.py @@ -0,0 +1,153 @@ +""" +scaffold_mysql.py +Scaffold implementacija za MySQL bazu (Jam.py kompatibilna). +Koristi zajednički modul scaffold_common za pomoćne funkcije i konstante. +""" + +import mysql.connector +from mysql.connector import Error +import json +from pathlib import Path +import scaffold_common as common + + +# ============================================================ +# 🔌 1. KONEKCIJA +# ============================================================ +def connect_to_database(db_info: dict): + """Uspostavlja konekciju sa MySQL bazom.""" + try: + conn = mysql.connector.connect( + host=db_info.get("host", "localhost"), + user=db_info.get("user", ""), + password=db_info.get("password", ""), + database=db_info.get("db", ""), + port=db_info.get("port", 3306) + ) + if conn.is_connected(): + common.debug(f"Povezivanje na MySQL bazu: {db_info.get('db')}") + return conn + except Error as e: + raise ConnectionError(f"Greška pri konekciji na MySQL: {e}") + return None + + +# ============================================================ +# 🧱 2. 
STRUKTURA BAZE +# ============================================================ +def get_table_names(conn): + """Vraća listu tabela iz MySQL baze.""" + cursor = conn.cursor() + cursor.execute("SHOW TABLES;") + tables = [r[0] for r in cursor.fetchall()] + cursor.close() + common.debug(f"Pronađeno tabela: {tables}") + return tables + + +def get_table_info(conn, table_name: str): + """Vraća detalje o kolonama u tabeli.""" + cursor = conn.cursor(dictionary=True) + cursor.execute(f"SHOW COLUMNS FROM `{table_name}`;") + columns = [] + for row in cursor.fetchall(): + columns.append({ + "col_name": row["Field"], + "col_type": row["Type"], + "col_constraints": row.get("Key", ""), + "pk": row["Key"] == "PRI", + "not_null": row["Null"].upper() == "NO" + }) + cursor.close() + return columns + + +# ============================================================ +# 🧮 3. UPIS U admin.sqlite +# ============================================================ +def write_to_admin(conn_src, admin_path: Path, db_info: dict): + """Upisuje tabele i kolone iz MySQL baze u Jam.py admin.sqlite.""" + if not admin_path.exists(): + raise FileNotFoundError(f"admin.sqlite nije pronađen na {admin_path}") + + import sqlite3 + conn_admin = sqlite3.connect(admin_path) + cur_admin = conn_admin.cursor() + + # brišemo postojeće meta-podatke + cur_admin.execute("DELETE FROM SYS_ITEMS;") + cur_admin.execute("DELETE FROM SYS_FIELDS;") + + tables = get_table_names(conn_src) + item_id = common.ITEM_START_ID + field_id = common.FIELD_START_ID + + for tname in tables: + columns = get_table_info(conn_src, tname) + + # Kreiraj zapis za SYS_ITEMS + item_rec = common.build_item_record( + name=tname, + item_id=item_id, + group=db_info.get("group", common.DEFAULT_GROUP), + owner=db_info.get("owner", common.DEFAULT_OWNER) + ) + cur_admin.execute(""" + INSERT INTO SYS_ITEMS (id, f_name, f_caption, f_group, f_owner, f_info) + VALUES (:id, :f_name, :f_caption, :f_group, :f_owner, :f_info) + """, item_rec) + + # Kreiraj zapise za SYS_FIELDS + for col in columns: + field_rec = common.build_field_record( + table_id=item_id, + name=col["col_name"], + col_type=col["col_type"], + pk=col["pk"], + not_null=col["not_null"], + field_id=field_id + ) + cur_admin.execute(""" + INSERT INTO SYS_FIELDS + (id, f_table, f_name, f_caption, f_data_type, f_not_null, f_primary_key, f_info) + VALUES (:id, :f_table, :f_name, :f_caption, :f_data_type, :f_not_null, :f_primary_key, :f_info) + """, field_rec) + field_id += 1 + + item_id += 1 + + conn_admin.commit() + conn_admin.close() + common.debug(f"Zapisano {len(tables)} tabela u admin.sqlite ({admin_path})") + + +# ============================================================ +# 🏗️ 4. GLAVNA ENTRY TAČKA +# ============================================================ +def my_database_procedure(conn, db_info: dict): + """Glavna procedura – povezuje sve korake.""" + admin_path = Path(db_info.get("admin_path", "admin.sqlite")) + common.debug(f"Pokrećem scaffold za MySQL → {admin_path}") + write_to_admin(conn, admin_path, db_info) + common.debug("Scaffold MySQL završio uspešno.") + + +# ============================================================ +# ✅ 5. 
GLAVNI SCENARIO (CLI FRIENDLY) +# ============================================================ +if __name__ == "__main__": + # primer testnog pokretanja + info = { + "db": "primer_db", + "host": "localhost", + "user": "root", + "password": "password", + "admin_path": "admin.sqlite", + "group": 1, + "owner": 1 + } + conn = connect_to_database(info) + if conn: + my_database_procedure(conn, info) + conn.close() + print("✅ Scaffold MySQL kompletiran.") \ No newline at end of file diff --git a/utils/02 jampy app scaffolding/scaffold_postgres.py b/utils/02 jampy app scaffolding/scaffold_postgres.py new file mode 100644 index 00000000..6c07d44a --- /dev/null +++ b/utils/02 jampy app scaffolding/scaffold_postgres.py @@ -0,0 +1,154 @@ +""" +scaffold_postgres.py +Scaffold implementacija za PostgreSQL bazu (Jam.py kompatibilna). +Koristi zajednički modul scaffold_common. +""" + +import psycopg2 +from psycopg2 import sql +from psycopg2.extras import DictCursor +from pathlib import Path +import scaffold_common as common + + +# ============================================================ +# 🔌 1. KONEKCIJA +# ============================================================ +def connect_to_database(db_info: dict): + """Uspostavlja konekciju sa PostgreSQL bazom.""" + try: + conn = psycopg2.connect( + dbname=db_info.get("db", ""), + user=db_info.get("user", ""), + password=db_info.get("password", ""), + host=db_info.get("host", "localhost"), + port=db_info.get("port", 5432) + ) + common.debug(f"Povezivanje na PostgreSQL bazu: {db_info.get('db')}") + return conn + except Exception as e: + raise ConnectionError(f"Greška pri konekciji na PostgreSQL: {e}") + + +# ============================================================ +# 🧱 2. STRUKTURA BAZE +# ============================================================ +def get_table_names(conn): + """Vraća listu tabela iz PostgreSQL baze.""" + cursor = conn.cursor() + cursor.execute(""" + SELECT table_name FROM information_schema.tables + WHERE table_schema = 'public' AND table_type='BASE TABLE'; + """) + tables = [r[0] for r in cursor.fetchall()] + cursor.close() + common.debug(f"Pronađeno tabela: {tables}") + return tables + + +def get_table_info(conn, table_name: str): + """Vraća detalje o kolonama u tabeli.""" + cursor = conn.cursor(cursor_factory=DictCursor) + cursor.execute(sql.SQL(""" + SELECT + column_name, + data_type, + is_nullable, + column_default, + ordinal_position + FROM information_schema.columns + WHERE table_name = %s; + """), [table_name]) + columns = [] + for row in cursor.fetchall(): + columns.append({ + "col_name": row["column_name"], + "col_type": row["data_type"], + "col_constraints": "", + "pk": is_primary_key(conn, table_name, row["column_name"]), + "not_null": row["is_nullable"].upper() == "NO" + }) + cursor.close() + return columns + + +def is_primary_key(conn, table_name: str, col_name: str) -> bool: + """Pomoćna funkcija: proverava da li je kolona primarni ključ.""" + cursor = conn.cursor() + cursor.execute(""" + SELECT kcu.column_name + FROM information_schema.table_constraints tc + JOIN information_schema.key_column_usage kcu + ON tc.constraint_name = kcu.constraint_name + WHERE tc.table_name = %s AND tc.constraint_type = 'PRIMARY KEY'; + """, [table_name]) + pk_cols = [r[0] for r in cursor.fetchall()] + cursor.close() + return col_name in pk_cols + + +# ============================================================ +# 🧮 3. 
UPIS U admin.sqlite +# ============================================================ +def write_to_admin(conn_src, admin_path: Path, db_info: dict): + """Upisuje tabele i kolone iz PostgreSQL baze u Jam.py admin.sqlite.""" + import sqlite3 + if not admin_path.exists(): + raise FileNotFoundError(f"admin.sqlite nije pronađen na {admin_path}") + + conn_admin = sqlite3.connect(admin_path) + cur_admin = conn_admin.cursor() + + cur_admin.execute("DELETE FROM SYS_ITEMS;") + cur_admin.execute("DELETE FROM SYS_FIELDS;") + + tables = get_table_names(conn_src) + item_id = common.ITEM_START_ID + field_id = common.FIELD_START_ID + + for tname in tables: + columns = get_table_info(conn_src, tname) + + item_rec = common.build_item_record( + name=tname, + item_id=item_id, + group=db_info.get("group", common.DEFAULT_GROUP), + owner=db_info.get("owner", common.DEFAULT_OWNER) + ) + cur_admin.execute(""" + INSERT INTO SYS_ITEMS (id, f_name, f_caption, f_group, f_owner, f_info) + VALUES (:id, :f_name, :f_caption, :f_group, :f_owner, :f_info) + """, item_rec) + + for col in columns: + field_rec = common.build_field_record( + table_id=item_id, + name=col["col_name"], + col_type=col["col_type"], + pk=col["pk"], + not_null=col["not_null"], + field_id=field_id + ) + cur_admin.execute(""" + INSERT INTO SYS_FIELDS + (id, f_table, f_name, f_caption, f_data_type, f_not_null, f_primary_key, f_info) + VALUES (:id, :f_table, :f_name, :f_caption, :f_data_type, :f_not_null, :f_primary_key, :f_info) + """, field_rec) + field_id += 1 + + item_id += 1 + + conn_admin.commit() + conn_admin.close() + common.debug(f"Zapisano {len(tables)} tabela u admin.sqlite ({admin_path})") + + +# ============================================================ +# 🏗️ 4. GLAVNA ENTRY TAČKA +# ============================================================ +def my_database_procedure(conn, db_info: dict): + """Glavna procedura – povezuje sve korake.""" + admin_path = Path(db_info.get("admin_path", "admin.sqlite")) + common.debug(f"Pokrećem scaffold za PostgreSQL → {admin_path}") + write_to_admin(conn, admin_path, db_info) + common.debug("Scaffold PostgreSQL završio uspešno.") \ No newline at end of file diff --git a/utils/02 jampy app scaffolding/scaffold_sqlite.py b/utils/02 jampy app scaffolding/scaffold_sqlite.py new file mode 100644 index 00000000..ff32ef7c --- /dev/null +++ b/utils/02 jampy app scaffolding/scaffold_sqlite.py @@ -0,0 +1,122 @@ +""" +Scaffold implementacija za SQLite bazu (Jam.py kompatibilna). +Korišćenje zajedničkog modula scaffold_common za pomoćne funkcije i konstante. 
+""" + +import sqlite3 +import json +from pathlib import Path +import scaffold_common as common + +# KONEKCIJA +def connect_to_database(db_info: dict) -> sqlite3.Connection: + """Uspostavlja konekciju sa SQLite bazom.""" + db_path = db_info.get("db") + if not db_path: + raise ValueError("Nedostaje putanja do SQLite baze (db_info['db']).") + + common.debug(f"Povezivanje na SQLite bazu: {db_path}") + return sqlite3.connect(db_path) + +# STRUKTURA BAZE +def get_table_names(conn: sqlite3.Connection): + """Vraća listu tabela iz SQLite baze.""" + cur = conn.cursor() + cur.execute("SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%';") + tables = [r[0] for r in cur.fetchall()] + common.debug(f"Pronađeno tabela: {tables}") + return tables + +def get_table_info(conn: sqlite3.Connection, table_name: str): + """Vraća detalje o kolonama u tabeli.""" + cur = conn.cursor() + cur.execute(f"PRAGMA table_info({table_name})") + columns = [] + for cid, name, col_type, notnull, dflt_value, pk in cur.fetchall(): + columns.append({ + "col_name": name, + "col_type": col_type or "TEXT", + "col_constraints": "", + "pk": bool(pk), + "not_null": bool(notnull) + }) + return columns + +# UPIS U admin.sqlite +def write_to_admin(conn_src: sqlite3.Connection, admin_path: Path, db_info: dict): + """Upisuje tabele i kolone iz izvorne baze u Jam.py admin.sqlite.""" + if not admin_path.exists(): + raise FileNotFoundError(f"admin.sqlite nije pronađen na {admin_path}") + + conn_admin = sqlite3.connect(admin_path) + cur_admin = conn_admin.cursor() + + # brišemo postojeće meta-podatke, ali samo one koje smo prethodno dodali + cur_admin.execute("DELETE FROM `SYS_ITEMS` WHERE `id`>=6;") + cur_admin.execute("DELETE FROM `SYS_FIELDS` WHERE `id`>=12;") + + tables = get_table_names(conn_src) + item_id = common.ITEM_START_ID + field_id = common.FIELD_START_ID + + for tname in tables: + columns = get_table_info(conn_src, tname) + + # Kreiraj zapis za SYS_ITEMS + item_rec = common.build_item_record( + name=tname, + item_id=item_id, + group=db_info.get("group", common.DEFAULT_GROUP), + owner=db_info.get("owner", common.DEFAULT_OWNER) + ) + cur_admin.execute( + """ + INSERT INTO SYS_ITEMS (id, f_name, f_caption, f_group, f_owner, f_info) + VALUES (:id, :f_name, :f_caption, :f_group, :f_owner, :f_info) + """, item_rec) + + # Kreiraj zapise za SYS_FIELDS + for col in columns: + field_rec = common.build_field_record( + table_id=item_id, + name=col["col_name"], + col_type=col["col_type"], + pk=col["pk"], + not_null=col["not_null"], + field_id=field_id + ) + cur_admin.execute( + """ + INSERT INTO SYS_FIELDS + (id, f_table, f_name, f_caption, f_data_type, f_not_null, f_primary_key, f_info) + VALUES (:id, :f_table, :f_name, :f_caption, :f_data_type, :f_not_null, :f_primary_key, :f_info) + """, field_rec) + field_id += 1 + + item_id += 1 + + conn_admin.commit() + conn_admin.close() + common.debug(f"Zapisano {len(tables)} tabela u admin.sqlite ({admin_path})") + +# GLAVNA ENTRY TAČKA +def my_database_procedure(conn: sqlite3.Connection, db_info: dict): + """Glavna procedura - povezuje sve korake.""" + admin_path = Path(db_info.get("admin_path", "admin.sqlite")) + common.debug(f"Pokrećem scaffold za SQLite → {admin_path}") + write_to_admin(conn, admin_path, db_info) + common.debug("Scaffold SQLite završio uspešno.") + +# GLAVNI SCENARIO (CLI FRIENDLY) +if __name__ == "__main__": + # primer testnog pokretanja + info = { + "db": "primer.db", + "admin_path": "admin.sqlite", + "group": 1, + "owner": 1 + } + conn = 
connect_to_database(info)
+    my_database_procedure(conn, info)
+    conn.close()
+    print("Scaffold SQLite completed.")
diff --git a/utils/02 jampy app scaffolding/uputstvo/jampy_app_scaffold.md b/utils/02 jampy app scaffolding/uputstvo/jampy_app_scaffold.md
new file mode 100644
index 00000000..3c4d1d66
--- /dev/null
+++ b/utils/02 jampy app scaffolding/uputstvo/jampy_app_scaffold.md
@@ -0,0 +1,100 @@
+
+# Jam.py app scaffold
+
+`scaffold_common.py` is designed as the base for every future `scaffold_{driver}.py` module (SQLite, MySQL, PostgreSQL, etc.).
+
+## `scaffold_common.py`
+
+Shared functions and constants for all Jam.py scaffold_{driver} modules.
+
+### How to use it
+
+In every `scaffold_{driver}.py` (say `scaffold_sqlite.py`) just add at the top:
+
+```python
+import scaffold_common as common
+```
+
+and then replace the old local calls like this:
+
+```python
+field = common.build_field_record(...)
+item = common.build_item_record(...)
+dtype = common.get_f_data_type(sql_type)
+```
+
+This means:
+
+- no more code duplicated between the SQLite version and the others,
+- all helper functions and constants are centralized,
+- a new `scaffold_postgres.py` can be added without copying any logic.
+
+## SQLite Jam.py app scaffolding
+
+Uses `scaffold_common.py` as the shared base.
+
+The file is fully functional but far cleaner than the original - all duplicates were removed, and the helper functions and constants are pulled from `scaffold_common.py`.
+
+All of the code lives in `scaffold_sqlite.py`.
+
+## Firebird Jam.py app scaffolding
+
+Built on the same model as `scaffold_sqlite.py`, and of course uses the same `scaffold_common.py`.
+
+All of the code lives in `scaffold_firebird.py`.
+
+## MySQL Jam.py app scaffolding
+
+Built on the same model as `scaffold_sqlite.py`, and of course uses the same `scaffold_common.py`.
+
+All of the code lives in `scaffold_mysql.py`.
+
+## Postgres Jam.py app scaffolding
+
+Built on the same model as `scaffold_sqlite.py`, and of course uses the same `scaffold_common.py`.
+
+All of the code lives in `scaffold_postgres.py`.
+
+### What was done
+
+| Part | Before | After |
+| -------------- | -------------------------- | ------------------- |
+| Helpers (`to_camel_case`, `sanitize_field_name`, `get_f_data_type`, …) | duplicated across several files | all moved into `scaffold_common.py` |
+| Constants (`ITEM_START_ID`, `FIELD_START_ID`, …) | defined in every module | centralized |
+| Writing into `admin.sqlite` | long and repeated | generic `write_to_admin()` function |
+| Reusability | hard to add a new DB | only `get_table_info()` and `connect_to_database()` need to be swapped |
+
+## `scaffold_loader.py`
+
+The last piece of the puzzle: **`scaffold_loader.py`**, a universal "registry" that automatically detects the driver and runs the matching `scaffold_{driver}.py` module.
+
+All of the code lives in `scaffold_loader.py`.
+
+## How it works
+
+- Takes `driver` from `db_info` (`sqlite`, `mysql`, `postgres`, `firebird`).
+- Imports the matching module (`scaffold_sqlite`, `scaffold_mysql`, `scaffold_postgres`, `scaffold_firebird`).
+- Calls:
+  - `connect_to_database(db_info)`
+  - `my_database_procedure(conn, db_info)`
+- Closes the connection and prints tidy debug messages; a usage sketch follows below.
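+
+As a minimal usage sketch, the whole pipeline is driven from a single `db_info` dict passed to `run_scaffold()`. The connection values below are placeholders, not a real database, and `admin.sqlite` must already exist inside a Jam.py project (the loader does not create it):
+
+```python
+from scaffold_loader import run_scaffold
+
+# Placeholder credentials and paths - adjust to your environment.
+run_scaffold({
+    "driver": "postgres",        # sqlite / mysql / postgres / firebird
+    "db": "primer_db",
+    "host": "localhost",
+    "port": 5432,
+    "user": "app",
+    "password": "secret",
+    "admin_path": "admin.sqlite",  # existing Jam.py admin database
+    "group": 1,
+    "owner": 1,
+})
+```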
+
+---
+
+## The complete system
+
+```sh
+scaffold_common.py
+scaffold_sqlite.py
+scaffold_mysql.py
+scaffold_postgres.py
+scaffold_firebird.py
+scaffold_loader.py
+```
+
+Together they form a "universal Jam.py scaffold" engine that can read any SQLite, Firebird, MySQL or PostgreSQL database and populate `admin.sqlite` without changing a line of code.
+
+Everything is centralized, modular and scalable - MS SQL Server and Oracle would slot in the same way (a rough sketch of such a driver follows below).
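+
+For MS SQL Server or Oracle, a new backend would follow the same contract. The skeleton below is only an assumption of what such a module could look like (the file name `scaffold_mssql.py`, the `pyodbc` mention and all function bodies are hypothetical and do not ship in this PR); `scaffold_loader` only requires `connect_to_database()` and `my_database_procedure()` to exist:
+
+```python
+"""scaffold_mssql.py - hypothetical skeleton for a future driver."""
+from pathlib import Path
+import scaffold_common as common
+
+def connect_to_database(db_info: dict):
+    # e.g. pyodbc.connect(...) would go here - intentionally left unimplemented
+    raise NotImplementedError("MS SQL connection not implemented yet")
+
+def get_table_names(conn):
+    # should return a list of user table names, like the other drivers
+    raise NotImplementedError
+
+def get_table_info(conn, table_name: str):
+    # should return dicts with col_name, col_type, col_constraints, pk, not_null
+    raise NotImplementedError
+
+def my_database_procedure(conn, db_info: dict):
+    admin_path = Path(db_info.get("admin_path", "admin.sqlite"))
+    common.debug(f"Running scaffold for MSSQL -> {admin_path}")
+    # A real driver would now call its own write_to_admin(conn, admin_path, db_info),
+    # reusing common.build_item_record / common.build_field_record exactly like
+    # scaffold_postgres.py does.
+```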