Add optional JSON object .import to POST /api/teleporter that allows the user to pick what is to be restored

Signed-off-by: DL6ER <dl6er@dl6er.de>
This commit is contained in:
DL6ER 2024-02-03 19:33:34 +01:00
parent 04cf2e831d
commit 714f14babb
No known key found for this signature in database
GPG Key ID: 00135ACBD90B28DD
5 changed files with 205 additions and 27 deletions

View File

@ -42,6 +42,50 @@ components:
file:
type: string
format: binary
import:
type: object
nullable: true
properties:
config:
type: boolean
description: "Import Pi-hole configuration"
example: true
dhcp_leases:
type: boolean
description: "Import Pi-hole DHCP leases"
example: true
gravity:
type: object
properties:
group:
type: boolean
description: "Import Pi-hole's groups table"
example: true
adlist:
type: boolean
description: "Import Pi-hole's adlist table"
example: true
adlist_by_group:
type: boolean
description: "Import Pi-hole's table relating adlist entries to groups"
example: true
domainlist:
type: boolean
description: "Import Pi-hole's domainlist table"
example: true
domainlist_by_group:
type: boolean
description: "Import Pi-hole's table relating domainlist entries to groups"
example: true
client:
type: boolean
description: "Import Pi-hole's client table"
example: true
client_by_group:
type: boolean
description: "Import Pi-hole's table relating client entries to groups"
example: true
description: "A JSON object of files to import. If omitted, all files will be imported."
responses:
'200':
description: OK

View File

@ -68,14 +68,18 @@ static int api_teleporter_GET(struct ftl_conn *api)
// Accumulator for a multipart/form-data Teleporter upload.
// Filled incrementally by the CivetWeb form callbacks (field_found/field_get).
struct upload_data {
bool too_large; // set once the uploaded file exceeds the size limit
char *sid; // session ID (heap-allocated, NUL-terminated)
cJSON *import; // parsed optional "import" object; NULL = import everything
uint8_t *data; // raw archive bytes received so far
char *filename; // client-supplied name of the uploaded file
size_t filesize; // number of bytes currently stored in data
// Which form field the chunks delivered to field_get() belong to
// (exactly one flag is set by field_found() at a time)
struct {
bool file;
bool sid;
bool import;
} field;
};
// Callback function for CivetWeb to determine which fields we want to receive
static bool is_file = false;
static bool is_sid = false;
static int field_found(const char *key,
const char *filename,
char *path,
@ -85,17 +89,22 @@ static int field_found(const char *key,
struct upload_data *data = (struct upload_data *)user_data;
log_debug(DEBUG_API, "Found field: \"%s\", filename: \"%s\"", key, filename);
is_file = false;
is_sid = false;
// Set all fields to false
memset(&data->field, false, sizeof(data->field));
if(strcasecmp(key, "file") == 0 && filename && *filename)
{
data->filename = strdup(filename);
is_file = true;
data->field.file = true;
return MG_FORM_FIELD_STORAGE_GET;
}
else if(strcasecmp(key, "sid") == 0)
{
is_sid = true;
data->field.sid = true;
return MG_FORM_FIELD_STORAGE_GET;
}
else if(strcasecmp(key, "import") == 0)
{
data->field.import = true;
return MG_FORM_FIELD_STORAGE_GET;
}
@ -111,7 +120,7 @@ static int field_get(const char *key, const char *value, size_t valuelen, void *
struct upload_data *data = (struct upload_data *)user_data;
log_debug(DEBUG_API, "Received field: \"%s\" (length %zu bytes)", key, valuelen);
if(is_file)
if(data->field.file)
{
if(data->filesize + valuelen > MAXFILESIZE)
{
@ -129,7 +138,7 @@ static int field_get(const char *key, const char *value, size_t valuelen, void *
log_debug(DEBUG_API, "Received file (%zu bytes, buffer is now %zu bytes)",
valuelen, data->filesize);
}
else if(is_sid)
else if(data->field.sid)
{
// Allocate memory for the SID
data->sid = calloc(valuelen + 1, sizeof(char));
@ -138,6 +147,27 @@ static int field_get(const char *key, const char *value, size_t valuelen, void *
// Add terminating NULL byte (memcpy does not do this)
data->sid[valuelen] = '\0';
}
else if(data->field.import)
{
// Try to parse the JSON data of the optional "import" field
cJSON *json = cJSON_ParseWithLength(value, valuelen);
if(json == NULL)
{
log_err("Unable to parse JSON data in API request: %s", cJSON_GetErrorPtr());
return MG_FORM_FIELD_HANDLE_ABORT;
}
// Check if the JSON data is an object (the API contract requires
// an object, not an array/scalar)
if(!cJSON_IsObject(json))
{
log_err("JSON data in API request is not an object");
cJSON_Delete(json);
return MG_FORM_FIELD_HANDLE_ABORT;
}
// Free any previously parsed object so a repeated "import" field
// does not leak the earlier allocation (cJSON_Delete(NULL) is a no-op)
cJSON_Delete(data->import);
// Store the parsed JSON data
data->import = json;
}
// If there is more data in this field, get the next chunk.
// Otherwise: handle the next field.
@ -168,6 +198,11 @@ static int free_upload_data(struct upload_data *data)
free(data->data);
data->data = NULL;
}
if(data->import)
{
cJSON_Delete(data->import);
data->import = NULL;
}
return 0;
}
@ -262,7 +297,7 @@ static int process_received_zip(struct ftl_conn *api, struct upload_data *data)
char hint[ERRBUF_SIZE];
memset(hint, 0, sizeof(hint));
cJSON *json_files = JSON_NEW_ARRAY();
const char *error = read_teleporter_zip(data->data, data->filesize, hint, json_files);
const char *error = read_teleporter_zip(data->data, data->filesize, hint, data->import, json_files);
if(error != NULL)
{
const size_t msglen = strlen(error) + strlen(hint) + 4;
@ -277,7 +312,7 @@ static int process_received_zip(struct ftl_conn *api, struct upload_data *data)
free_upload_data(data);
return send_json_error_free(api, 400,
"bad_request",
"Invalid ZIP archive",
"Invalid request",
msg, true);
}
@ -632,14 +667,34 @@ static int process_received_tar_gz(struct ftl_conn *api, struct upload_data *dat
// Parse JSON files in the TAR archive
cJSON *imported_files = JSON_NEW_ARRAY();
for(size_t i = 0; i < sizeof(teleporter_v5_files) / sizeof(struct teleporter_files); i++)
// Check if the archive contains gravity tables
cJSON *gravity = data->import != NULL ? cJSON_GetObjectItemCaseSensitive(data->import, "gravity") : NULL;
if(data->import == NULL || gravity != NULL)
{
size_t fileSize = 0u;
cJSON *json = NULL;
const char *file = find_file_in_tar(archive, archive_size, teleporter_v5_files[i].filename, &fileSize);
if(file != NULL && fileSize > 0u && (json = cJSON_ParseWithLength(file, fileSize)) != NULL)
if(import_json_table(json, &teleporter_v5_files[i]))
JSON_COPY_STR_TO_ARRAY(imported_files, teleporter_v5_files[i].filename);
for(size_t i = 0; i < sizeof(teleporter_v5_files) / sizeof(struct teleporter_files); i++)
{
// - if import is NULL we import all files/tables
// - if import is non-NULL, but gravity is NULL we skip
//   the import of gravity tables (the enclosing check
//   already ensures this, the test below is defensive)
// - if import is non-NULL, and gravity is non-NULL, we
//   import the file/table only if it is in the object, a
//   boolean and true
if(data->import != NULL && (gravity == NULL || !JSON_KEY_TRUE(gravity, teleporter_v5_files[i].table_name)))
{
log_info("Skipping import of \"%s\" as it was not requested for import",
teleporter_v5_files[i].filename);
continue;
}
// Import the JSON file
size_t fileSize = 0u;
cJSON *json = NULL;
const char *file = find_file_in_tar(archive, archive_size, teleporter_v5_files[i].filename, &fileSize);
if(file != NULL && fileSize > 0u && (json = cJSON_ParseWithLength(file, fileSize)) != NULL)
if(import_json_table(json, &teleporter_v5_files[i]))
JSON_COPY_STR_TO_ARRAY(imported_files, teleporter_v5_files[i].filename);
}
}
// Temporarily write further files to disk so we can import them on restart
@ -648,15 +703,19 @@ static int process_received_tar_gz(struct ftl_conn *api, struct upload_data *dat
const char *destination;
} extract_files[] = {
{
// i = 0
.archive_name = "custom.list",
.destination = DNSMASQ_CUSTOM_LIST_LEGACY
},{
// i = 1
.archive_name = "dhcp.leases",
.destination = DHCPLEASESFILE
},{
// i = 2
.archive_name = "pihole-FTL.conf",
.destination = GLOBALCONFFILE_LEGACY
},{
// i = 3
.archive_name = "setupVars.conf",
.destination = config.files.setupVars.v.s
}
@ -665,6 +724,21 @@ static int process_received_tar_gz(struct ftl_conn *api, struct upload_data *dat
{
size_t fileSize = 0u;
const char *file = find_file_in_tar(archive, archive_size, extract_files[i].archive_name, &fileSize);
// i = 1 is the DHCP lease file, gated by import.dhcp_leases
if(data->import != NULL && i == 1 && !JSON_KEY_TRUE(data->import, "dhcp_leases"))
{
log_info("Skipping import of \"%s\" as it was not requested for import",
extract_files[i].archive_name);
continue;
}
// all other values of i belong to config files, gated by
// import.config (i == 1 must be excluded here: otherwise a
// requested DHCP lease import would fall through into this
// branch and be skipped whenever "config" is not requested)
else if(data->import != NULL && i != 1 && !JSON_KEY_TRUE(data->import, "config"))
{
log_info("Skipping import of \"%s\" as it was not requested for import",
extract_files[i].archive_name);
continue;
}
if(file != NULL && fileSize > 0u)
{
// Write file to disk

View File

@ -254,3 +254,9 @@
// Increment the numeric value of the given cJSON number object by inc
// (updates both valuedouble and valueint via cJSON_SetNumberHelper)
#define JSON_INCREMENT_NUMBER(number_obj, inc)({ \
cJSON_SetNumberHelper(number_obj, number_obj->valuedouble + inc); \
})
// Returns true if the key exists in obj and its value is JSON true,
// otherwise false. The macro arguments are parenthesized and the local
// variable carries an unlikely name so it cannot shadow (and silently
// capture) an identifier called "elem" passed in by the caller.
#define JSON_KEY_TRUE(obj, key)({ \
const cJSON *jkt_elem_ = cJSON_GetObjectItemCaseSensitive((obj), (key)); \
jkt_elem_ != NULL && cJSON_IsTrue(jkt_elem_); \
})

View File

@ -37,11 +37,11 @@
#include "webserver/cJSON/cJSON.h"
// set_event()
#include "events.h"
// JSON_KEY_TRUE
#include "webserver/json_macros.h"
// Tables to copy from the gravity database to the Teleporter database
static const char *gravity_tables[] = {
"info",
"group",
"adlist",
"adlist_by_group",
@ -365,7 +365,7 @@ static const char *import_dhcp_leases(void *ptr, size_t size, char * const hint)
}
static const char *test_and_import_database(void *ptr, size_t size, const char *destination,
const char **tables, const unsigned int num_tables,
const char **tables, const size_t num_tables,
char * const hint)
{
// Check if the file is empty
@ -523,7 +523,7 @@ static const char *test_and_import_database(void *ptr, size_t size, const char *
return NULL;
}
const char *read_teleporter_zip(uint8_t *buffer, const size_t buflen, char * const hint, cJSON *imported_files)
const char *read_teleporter_zip(uint8_t *buffer, const size_t buflen, char * const hint, cJSON *import, cJSON *imported_files)
{
// Initialize ZIP archive
mz_zip_archive zip = { 0 };
@ -585,8 +585,16 @@ const char *read_teleporter_zip(uint8_t *buffer, const size_t buflen, char * con
// Process file
// Is this "etc/pihole/pihole.toml" ?
if(strcmp(file_stat.m_filename, "etc/pihole/pihole.toml") == 0)
if(strcmp(file_stat.m_filename, extract_files[0]) == 0)
{
// Check whether we should import this file
if(import != NULL && !JSON_KEY_TRUE(import, "config"))
{
log_info("Ignoring file %s in Teleporter archive (not in import list)", file_stat.m_filename);
free(ptr);
continue;
}
// Import Pi-hole configuration
memset(hint, 0, ERRBUF_SIZE);
const char *err = test_and_import_pihole_toml(ptr, file_stat.m_uncomp_size, hint);
@ -598,8 +606,16 @@ const char *read_teleporter_zip(uint8_t *buffer, const size_t buflen, char * con
log_debug(DEBUG_CONFIG, "Imported Pi-hole configuration: %s", file_stat.m_filename);
}
// Is this "etc/pihole/dhcp.leases"?
else if(strcmp(file_stat.m_filename, "etc/pihole/dhcp.leases") == 0)
else if(strcmp(file_stat.m_filename, extract_files[1]) == 0)
{
// Check whether we should import this file
if(import != NULL && !JSON_KEY_TRUE(import, "dhcp_leases"))
{
log_info("Ignoring file %s in Teleporter archive (not in import list)", file_stat.m_filename);
free(ptr);
continue;
}
// Import DHCP leases
memset(hint, 0, ERRBUF_SIZE);
const char *err = import_dhcp_leases(ptr, file_stat.m_uncomp_size, hint);
@ -610,12 +626,50 @@ const char *read_teleporter_zip(uint8_t *buffer, const size_t buflen, char * con
}
log_debug(DEBUG_CONFIG, "Imported DHCP leases: %s", file_stat.m_filename);
}
else if(strcmp(file_stat.m_filename, "etc/pihole/gravity.db") == 0)
// Is this "etc/pihole/gravity.db"?
else if(strcmp(file_stat.m_filename, extract_files[2]) == 0)
{
// Check whether we should import this file
if(import != NULL && !cJSON_HasObjectItem(import, "gravity"))
{
log_info("Ignoring file %s in Teleporter archive (not in import list)", file_stat.m_filename);
free(ptr);
continue;
}
const char *import_tables[ArraySize(gravity_tables)] = { NULL };
size_t num_tables = 0u;
if(import == NULL)
{
// No import filter given: import all tables
num_tables = ArraySize(gravity_tables);
memcpy(import_tables, gravity_tables, sizeof(gravity_tables));
}
else
{
// Get object at import.gravity
cJSON *import_gravity = cJSON_GetObjectItem(import, "gravity");
// Check if import.gravity is a JSON object
if(import_gravity == NULL || !cJSON_IsObject(import_gravity))
{
log_warn("Ignoring file %s in Teleporter archive (import.gravity is not a JSON object)", file_stat.m_filename);
free(ptr);
continue;
}
// Import selected tables: the per-table booleans are nested
// under import.gravity (see API spec), so look them up on
// import_gravity - not on the top-level import object
for(size_t j = 0; j < ArraySize(gravity_tables); j++)
{
if(JSON_KEY_TRUE(import_gravity, gravity_tables[j]))
import_tables[num_tables++] = gravity_tables[j];
}
}
// Import gravity database
memset(hint, 0, ERRBUF_SIZE);
const char *err = test_and_import_database(ptr, file_stat.m_uncomp_size, config.files.gravity.v.s,
gravity_tables, ArraySize(gravity_tables), hint);
import_tables, num_tables, hint);
if(err != NULL)
{
free(ptr);
@ -730,7 +784,7 @@ bool read_teleporter_zip_from_disk(const char *filename)
// Process ZIP archive
char hint[ERRBUF_SIZE] = "";
cJSON *imported_files = cJSON_CreateArray();
const char *error = read_teleporter_zip(ptr, size, hint, imported_files);
const char *error = read_teleporter_zip(ptr, size, hint, NULL, imported_files);
if(error != NULL)
{

View File

@ -15,7 +15,7 @@
const char *generate_teleporter_zip(mz_zip_archive *zip, char filename[128], void **ptr, size_t *size);
bool free_teleporter_zip(mz_zip_archive *zip);
const char *read_teleporter_zip(uint8_t *buffer, const size_t buflen, char *hint, cJSON *json_files);
const char *read_teleporter_zip(uint8_t *buffer, const size_t buflen, char *hint, cJSON *import, cJSON *json_files);
bool write_teleporter_zip_to_disk(void);
bool read_teleporter_zip_from_disk(const char *filename);