history: print history tracking

Signed-off-by: Jordan Ruthe <jordan.ruthe@gmail.com>
Author: jordanruthe
Date: 2021-03-11 20:09:25 -05:00
Parent: 7c89756263
Commit: 9988da72c1
3 changed files with 287 additions and 0 deletions


@@ -118,6 +118,16 @@ Currently we support Slic3r derivatives and Cura with Cura-Octoprint.
[octoprint_compat]
```
## History
Enables Moonraker to track and store print history. To enable this plugin, add
the configuration below.
```
# moonraker.conf
[history]
```
## paneldue
Enables PanelDue display support. The PanelDue should be connected to the
host machine, either via the machine's UART GPIOs or through a USB-TTL


@@ -1607,6 +1607,62 @@
}
```
## History APIs
The APIs below are available when the `[history]` plugin has been configured.
### Get job list
- HTTP command:\
  `GET /server/history/list?limit=50&start=50&since=1&before=5&id=1`
- Websocket command:\
  `{"jsonrpc":"2.0","method":"server.history.list","id":"1","params":{}}`
All arguments are optional (an example request that passes arguments is shown
after the response sample below). Arguments are as follows:
- `before`: All jobs before this UNIX timestamp
- `id`: ID of the job to display. This overrides the other arguments.
- `limit`: Number of prints to return
- `since`: All jobs after this UNIX timestamp
- `start`: Record number to start from (i.e. 10 would start at the 10th print)
- Returns the total count along with an object of jobs that have been printed,
  keyed by job id
```json
{
    "count": <number of prints>,
    "prints": {
        <id>: {
            "end_time": <end_time>,
            "filament_used": <filament_used>,
            "filename": <filename>,
            "metadata": {},  # Object of file metadata
            "print_duration": <print_duration>,
            "status": <status>,
            "start_time": <start_time>,
            "total_duration": <total_duration>
        },
        ...
    }
}
```
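As a sketch, a websocket request combining several of the optional arguments
could look like the following; the parameter names come from the list above,
while the timestamp, limit and start values are only illustrative:
```json
{
    "jsonrpc": "2.0",
    "method": "server.history.list",
    "id": "1",
    "params": {
        "limit": 10,
        "start": 0,
        "since": 1615000000,
        "before": 1615500000
    }
}
```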
### Delete job
- HTTP command:\
  `DELETE /server/history/delete?all`
- Websocket command:\
  `{"jsonrpc":"2.0","method":"server.history.delete","id":"1","params":{}}`
One of the arguments below is required (see the example request after the
response sample below):
- `all`: Set to true to delete all history
- `id`: Delete a specific job
- Returns an array of deleted ids
```json
[
    id1,
    id2,
    ...
]
```
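As a sketch, a websocket request deleting a single job could pass the `id`
argument in `params`; the id value here is only an example:
```json
{
    "jsonrpc": "2.0",
    "method": "server.history.delete",
    "id": "1",
    "params": {
        "id": 2
    }
}
```
Passing `{"all": true}` as the params instead would request deletion of every
stored job.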
## Websocket notifications
Printer generated events are sent over the websocket as JSON-RPC 2.0
notifications. These notifications are sent to all connected clients


@@ -0,0 +1,221 @@
# History cache for printer jobs
#
# This file may be distributed under the terms of the GNU GPLv3 license.
import json, logging, time
from tornado.ioloop import IOLoop

SAVE_INTERVAL = 5
HIST_NAMESPACE = "history"
JOBS_AUTO_INC_KEY = "history_auto_inc_id"


class History:
    def __init__(self, config):
        self.server = config.get_server()
        self.database = self.server.lookup_plugin("database")
        self.gcdb = self.database.wrap_namespace(
            "gcode_metadata", parse_keys=False)
        self.current_job = None
        self.current_job_id = None
        self.print_stats = {}

        self.server.register_event_handler(
            "server:klippy_ready", self._init_ready)
        self.server.register_event_handler(
            "server:status_update", self._status_update)
        self.server.register_event_handler(
            "server:klippy_disconnect", self._save_job_on_error)
        self.server.register_event_handler(
            "server:klippy_shutdown", self._save_job_on_error)

        self.server.register_endpoint(
            "/server/history/delete", ['DELETE'], self._handle_job_delete)
        self.server.register_endpoint(
            "/server/history/list", ['GET'], self._handle_jobs_list)

        self.database.register_local_namespace(HIST_NAMESPACE)
        self.history_ns = self.database.wrap_namespace(
            HIST_NAMESPACE, parse_keys=False)

        if JOBS_AUTO_INC_KEY not in self.database.ns_keys("moonraker"):
            self.database.insert_item("moonraker", JOBS_AUTO_INC_KEY, 0)

    async def _init_ready(self):
        klippy_apis = self.server.lookup_plugin('klippy_apis')
        sub = {"print_stats": None}
        try:
            result = await klippy_apis.subscribe_objects(sub)
        except self.server.error as e:
            # Without the subscription there is nothing to track
            logging.info(f"Error subscribing to print_stats: {e}")
            return
        self.print_stats = result.get("print_stats", {})

    async def _handle_job_delete(self, web_request):
        all = web_request.get_boolean("all", False)
        id = str(web_request.get_int("id", -1))
        if all:
            # Delete every stored job and reset the auto increment counter
            deljobs = []
            for job in self.history_ns.keys():
                self.delete_job(job, False)
                deljobs.append(job)
            self.database.update_item("moonraker", JOBS_AUTO_INC_KEY, 0)
            return deljobs
        if id == "-1":
            raise self.server.error("No ID to delete")
        if id not in self.history_ns.keys():
            raise self.server.error(f"Invalid job id: {id}")
        self.delete_job(id)
        return [id]

    async def _handle_jobs_list(self, web_request):
        id = str(web_request.get_int("id", -1))
        if id != "-1":
            if id not in self.history_ns:
                raise self.server.error(f"Invalid job id: {id}")
            return {id: self.history_ns.get(id, {})}

        before = web_request.get_float("before", -1)
        since = web_request.get_float("since", -1)
        limit = web_request.get_int("limit", 50)
        start = web_request.get_int("start", 0)

        if start > (len(self.history_ns) - 1) or len(self.history_ns) == 0:
            return {"count": len(self.history_ns), "prints": {}}

        i = 0
        end_num = len(self.history_ns)
        jobs = {}
        start_num = 0
        for id in self.history_ns.keys():
            job = self.history_ns.get(id)
            # Filter jobs outside of the requested time window
            if since != -1 and since > job.get('start_time'):
                start_num += 1
                continue
            if before != -1 and before < job.get('end_time'):
                end_num -= 1
                continue
            # Keep iterating so the count reflects the full time window,
            # but stop adding jobs once the limit has been reached
            if limit != 0 and i >= limit:
                continue
            if start != 0:
                start -= 1
                continue
            jobs[id] = job
            i += 1
        return {"count": end_num - start_num, "prints": jobs}

    async def _status_update(self, data):
        if "print_stats" in data:
            ps = data['print_stats']
            if "state" in ps:
                old_state = self.print_stats.get('state', "")
                new_state = ps['state']
                if new_state != old_state:
                    if new_state == "printing" and old_state != "paused":
                        # A new print (not a resume) has started
                        self.print_stats.update(ps)
                        self.add_job(PrinterJob(self.print_stats))
                    elif new_state == "complete" and \
                            self.current_job is not None:
                        self.print_stats.update(ps)
                        self.finish_job("completed", self.print_stats)
                    elif new_state == "standby" and \
                            self.current_job is not None:
                        self.finish_job("cancelled", self.print_stats)
            # Keep the cached print_stats current for later saves
            self.print_stats.update(ps)

    def _save_job_on_error(self):
        if self.current_job is not None:
            self.save_current_job()

    def add_job(self, job):
        # Allocate the next job id from the auto increment counter
        self.current_job_id = str(self.database.get_item(
            "moonraker", JOBS_AUTO_INC_KEY))
        self.database.update_item(
            "moonraker", JOBS_AUTO_INC_KEY, int(self.current_job_id) + 1)
        self.current_job = job
        self.grab_job_metadata()
        self.history_ns.insert(self.current_job_id, job.get_stats())

    def delete_job(self, id, check_metadata=True):
        id = str(id)
        if id in self.history_ns.keys():
            self.history_ns.delete(id)

    def finish_job(self, status, updates):
        if self.current_job is None:
            return
        self.current_job.finish(status, updates)
        # Regrab metadata in case it wasn't parsed yet due to file upload
        self.grab_job_metadata()
        self.save_current_job()
        self.current_job = None
        self.current_job_id = None

    def get_job(self, id):
        id = str(id)
        if id not in self.history_ns.keys():
            return None
        return self.history_ns.get(id)

    def grab_job_metadata(self):
        if self.current_job is None:
            return
        filename = self.current_job.get("filename")
        if filename not in self.gcdb:
            return
        # Store the file's metadata with the job, minus the thumbnails
        metadata = {k: v for k, v in self.gcdb.get(filename).items()
                    if k != "thumbnails"}
        self.current_job.set("metadata", metadata)

    def save_current_job(self):
        self.history_ns.update_child(
            self.current_job_id, self.current_job.get_stats())


class PrinterJob:
    def __init__(self, data={}, file_metadata={}):
        self.end_time = None
        self.filament_used = 0
        self.filename = None
        self.metadata = None
        self.print_duration = 0
        self.status = "in_progress"
        self.start_time = time.time()
        self.total_duration = 0
        self.update_from_ps(data)

    def finish(self, status, print_stats={}):
        self.end_time = time.time()
        self.status = status
        self.update_from_ps(print_stats)

    def get(self, name):
        if not hasattr(self, name):
            return None
        return getattr(self, name)

    def get_stats(self):
        return self.__dict__

    def set(self, name, val):
        if not hasattr(self, name):
            return
        setattr(self, name, val)

    def update_from_ps(self, data):
        for i in data:
            if hasattr(self, i):
                setattr(self, i, data[i])


def load_plugin(config):
    return History(config)