📋 Commit iniziale con: - ✅ Documentazione unificata in docs/ - ✅ Codice Laravel in netgescon-laravel/ - ✅ Script automazione in scripts/ - ✅ Configurazione sync rsync - ✅ Struttura organizzata e pulita 🔄 Versione: 2025.07.19-1644 🎯 Sistema pronto per Git distribuito |
||
|---|---|---|
| .. | ||
| src | ||
| config.json | ||
| dbcampierelazioni | ||
| mapping_config.json | ||
| README.md | ||
| requirements.txt | ||
# --- CONFIGURATION AND LOGGING ---
def load_config(config_path):
    """Load and return the JSON configuration stored at *config_path*."""
    with open(config_path, encoding="utf-8") as handle:
        return json.load(handle)
def setup_logging(log_dir, log_file, debug=False):
    """Configure root logging to a rotating file and the console.

    Args:
        log_dir: Directory for the log file; created if missing.
        log_file: File name of the log inside *log_dir*.
        debug: When True, log at DEBUG level instead of INFO.
    """
    os.makedirs(log_dir, exist_ok=True)
    log_path = os.path.join(log_dir, log_file)
    logging.basicConfig(
        level=logging.DEBUG if debug else logging.INFO,
        format="%(asctime)s [%(levelname)s] %(message)s",
        handlers=[
            # 5 MiB per file, 3 rotated backups.  The original literal
            # 510241024 is the asterisk-stripped mangling of 5*1024*1024.
            RotatingFileHandler(log_path, maxBytes=5 * 1024 * 1024,
                                backupCount=3, encoding="utf-8"),
            logging.StreamHandler(),
        ],
    )
# --- DATA EXTRACTION FROM MDB FILES ---
def extract_data_from_mdb(mdb_path, table):
    """Extract one table from an Access MDB file into a pandas DataFrame.

    Runs the external `mdb-export` tool (mdbtools) and parses its CSV output.

    Args:
        mdb_path: Path to the .mdb file.
        table: Name of the table to export.

    Returns:
        DataFrame with all values as strings; an empty DataFrame if the
        export fails or the tool is not installed.
    """
    import io  # stdlib; wraps the CSV text emitted by mdb-export

    # mdb-export usage is `mdb-export [options] <file> <table>` and it emits
    # CSV with a header row by default -- there is no "csv" positional
    # argument, and `-H` would suppress the header that read_csv relies on.
    cmd = ["mdb-export", mdb_path, table]
    try:
        result = subprocess.run(cmd, capture_output=True, text=True, check=True)
        # pd.compat.StringIO was removed from pandas; io.StringIO is the
        # supported way to feed an in-memory string to read_csv.
        return pd.read_csv(io.StringIO(result.stdout), dtype=str)
    except FileNotFoundError as e:
        # mdb-export binary not installed / not on PATH.
        logging.error(f"mdb-export non disponibile: {e}")
        return pd.DataFrame()
    except subprocess.CalledProcessError as e:
        logging.error(f"Errore nell'estrazione dei dati da {table}: {e}")
        return pd.DataFrame()  # Restituisce un DataFrame vuoto in caso di errore
# --- SENDING DATA TO THE API ---
def send_to_api(endpoint, token, payload, timeout=30):
    """POST *payload* as JSON to *endpoint* using a Bearer token.

    Args:
        endpoint: Full URL of the API endpoint.
        token: Bearer token placed in the Authorization header.
        payload: JSON-serializable request body.
        timeout: Seconds to wait for the server; without a timeout,
            requests.post can block indefinitely.
    """
    headers = {
        'Authorization': f'Bearer {token}',
        'Accept': 'application/json',
        'Content-Type': 'application/json'
    }
    try:
        response = requests.post(endpoint, headers=headers, json=payload,
                                 timeout=timeout)
        response.raise_for_status()  # Solleva un'eccezione per risposte di errore
        logging.info(f"Invio a {endpoint} riuscito: {response.json()}")
    except requests.exceptions.RequestException as e:
        # Timeout e ConnectionError sono sottoclassi di RequestException.
        logging.error(f"Errore durante l'invio a {endpoint}: {e}")
# --- MAIN PROCESS ---
def main():
    """Load the agent configuration, then export every mapped MDB table
    from the input directory and push the rows to its API endpoint."""
    # `__file__` (mangled to the bare name `file` in the original) anchors
    # the config lookup to this script's own directory.
    base_dir = os.path.dirname(os.path.abspath(__file__))
    config_path = os.path.join(base_dir, "agent_config.json")
    cfg = load_config(config_path)

    setup_logging(cfg["LogDirectory"], cfg.get("LogFile", "agent.log"), cfg.get("Debug", False))

    input_dir = cfg["InputDirectory"]
    if not os.path.isdir(input_dir):
        logging.error(f"La directory di input non esiste: {input_dir}")
        return

    # The .mdb listing does not depend on the mapping: scan the directory
    # once instead of once per mapping.
    mdb_files = [f for f in os.listdir(input_dir) if f.endswith('.mdb')]

    for mapping in cfg.get("SyncMappings", []):
        table = mapping['table']
        endpoint = cfg['ApiBaseUrl'] + mapping['endpoint']
        token = cfg['AdminToken']
        for mdb_file in mdb_files:
            mdb_path = os.path.join(input_dir, mdb_file)
            data = extract_data_from_mdb(mdb_path, table)
            # Skip tables that produced no rows (or whose export failed).
            if not data.empty:
                payload = data.to_dict(orient='records')  # list of row dicts
                send_to_api(endpoint, token, payload)
# Script entry point.  The dunder names were asterisk-mangled to
# `name`/"main" in the original source.
if __name__ == "__main__":
    main()
### Explanation of the Script:
1. **Configuration and Logging**: The script loads configuration settings from `agent_config.json` and sets up logging.
2. **Data Extraction**: The `extract_data_from_mdb` function uses `mdb-export` to extract data from the specified MDB table and returns it as a Pandas DataFrame.
3. **Sending Data to API**: The `send_to_api` function sends the extracted data to the specified API endpoint using a POST request.
4. **Main Process**: The `main` function orchestrates the extraction and sending of data. It iterates through the MDB files in the specified input directory, extracts data from the relevant tables, and sends it to the API.
### Notes:
- Ensure that the `mdb-export` tool is installed and accessible in your environment.
- Adjust the filtering logic as needed based on your data cleaning requirements.
- Make sure to handle any specific error cases or logging as per your needs.