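"""Chunked file storage in SQLite.

Files are split into fixed-size blocks stored as BLOB rows (table 'datas'),
with one metadata row per stored path (table 'files').  FileSQL3 is the
store; FileS is the read-only, file-like handle returned by FileSQL3.get().
"""
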
import os
import json
import sqlite3
import uuid
import tempfile
from datetime import datetime
import mimetypes

# Register a couple of types the stdlib table may not know about.
mimetypes.add_type("video/webm", ".mkv")
mimetypes.add_type("audio/flac", ".flac")

# Files are split into blocks of this size (8 MiB) before being stored.
DEFAULT_BLOCK_SIZE = 1024 * 1024 * 8


class FileS:
    """Read-only, file-like view over a file stored in the database."""

    def __init__(self, meta, conn):
        self.size = meta['length']
        self.created = meta['created']
        self.modified = meta['modified']
        self.mimetype = meta['mimetype']
        self.encoding = meta['encoding']
        self.parts = json.loads(meta['parts'])
        self.conn = conn
        self.position = 0
        # Cached block: [bytes, absolute start offset, absolute end offset).
        self.buffer = [b'', -1, -1]

    def read(self, size=-1):
        """Read up to `size` bytes from the current position (all remaining if size < 0)."""
        if size < 0:
            size = self.size - self.position
        data = b''
        while size > 0:
            if size < DEFAULT_BLOCK_SIZE and self.buffer[1] <= self.position < self.buffer[2]:
                # Small read that falls inside the cached block: serve it from memory.
                chunk = self.buffer[0]
                start = self.position - self.buffer[1]
                end = min(start + size, self.buffer[2] - self.buffer[1])
                data += chunk[start:end]
                size -= end - start
                self.position += end - start
            else:
                part = self._get_next_part()
                if not part:
                    break
                cur = self.conn.cursor()
                cur.execute('SELECT data FROM datas WHERE uuid=?', (part['uuid'],))
                chunk = cur.fetchone()[0]
                if size >= DEFAULT_BLOCK_SIZE:
                    # Large read: take the rest of this block directly and advance
                    # by the number of bytes actually copied.
                    start = self.position - part['start']
                    piece = chunk[start:]
                    data += piece
                    size -= len(piece)
                    self.position += len(piece)
                else:
                    # Small read: cache the whole block so subsequent reads hit the buffer.
                    self.buffer = [chunk, part['start'], part['end']]
                    start = self.position - part['start']
                    end = min(start + size, part['end'] - part['start'])
                    data += chunk[start:end]
                    size -= end - start
                    self.position += end - start
        return data

    def _get_next_part(self):
        # Find the stored block that contains the current read position.
        for part in self.parts:
            if part['start'] <= self.position < part['end']:
                return part
        return None

    def seek(self, position):
        # Absolute positioning only; also invalidate the cached block.
        self.position = position
        self.buffer = [b'', -1, -1]

    def tell(self):
        return self.position


class FileSQL3:
    """Store files as chunked BLOBs in a single SQLite database."""

    def __init__(self, db_path):
        self.conn = sqlite3.connect(db_path, check_same_thread=False)
        self.conn.row_factory = sqlite3.Row
        self._init_tables()

    def _init_tables(self):
        cur = self.conn.cursor()
        # files: one metadata row per stored path; 'parts' is a JSON list of
        # {uuid, start, end} dicts describing the blocks that make up the file.
        cur.execute('''CREATE TABLE IF NOT EXISTS files (
            path TEXT PRIMARY KEY,
            created TEXT,
            modified TEXT,
            length INTEGER,
            encoding TEXT,
            mimetype TEXT,
            description TEXT,
            parts TEXT)''')
        # datas: one row per block of raw file content.
        cur.execute('''CREATE TABLE IF NOT EXISTS datas (
            uuid TEXT PRIMARY KEY,
            data BLOB,
            path TEXT,
            start INTEGER,
            end INTEGER)''')
        self.conn.commit()

    def get(self, file_path):
        """Return a FileS handle for a stored path, or None if it does not exist."""
        cur = self.conn.cursor()
        cur.execute('SELECT * FROM files WHERE path=?', (file_path,))
        meta = cur.fetchone()
        if meta:
            return FileS(dict(meta), self.conn)
        return None

    def putBytes(self, b, p_path, **kws):
        """Store an in-memory bytes object under p_path (goes through a temp file)."""
        f = tempfile.NamedTemporaryFile(delete=False)
        f.write(b)
        f.close()
        try:
            self.put(f.name, p_path=p_path, **kws)
        finally:
            os.unlink(f.name)

    def put(self, file_path, p_path=None, description=None, block_size=DEFAULT_BLOCK_SIZE):
        """Store the file at file_path under the key p_path (defaults to file_path)."""
        if not p_path:
            p_path = file_path
        with open(file_path, "rb") as f:
            file_size = os.path.getsize(file_path)
            file_created = datetime.fromtimestamp(os.path.getctime(file_path)).isoformat()
            file_modified = datetime.fromtimestamp(os.path.getmtime(file_path)).isoformat()
            parts = []
            start = 0
            cur = self.conn.cursor()
            while start < file_size:
                # Write the file block by block; each block gets its own row.
                end = min(start + block_size, file_size)
                data = f.read(block_size)
                data_uuid = str(uuid.uuid4())
                parts.append({'uuid': data_uuid, 'start': start, 'end': end})
                cur.execute('INSERT INTO datas (uuid, data, path, start, end) VALUES (?, ?, ?, ?, ?)',
                            (data_uuid, data, p_path, start, end))
                start = end
        parts_json = json.dumps(parts)
        # Guess the type from the stored path so putBytes (which uploads via a
        # temporary file without an extension) still gets a useful MIME type.
        mt, ec = mimetypes.guess_type(p_path)
        cur = self.conn.cursor()
        try:
            cur.execute('''INSERT INTO files (path, created, modified, length, encoding, mimetype, description, parts)
                           VALUES (?, ?, ?, ?, ?, ?, ?, ?)''',
                        (p_path, file_created, file_modified, file_size, ec, mt, description, parts_json))
        except sqlite3.IntegrityError:
            # The path already exists: drop its old blocks and metadata first
            # so the replaced blobs are not left orphaned in datas.
            cur.execute('SELECT parts FROM files WHERE path=?', (p_path,))
            old = cur.fetchone()
            if old:
                for part in json.loads(old['parts']):
                    cur.execute('DELETE FROM datas WHERE uuid=?', (part['uuid'],))
            cur.execute('DELETE FROM files WHERE path=?', (p_path,))
            cur.execute('''INSERT INTO files (path, created, modified, length, encoding, mimetype, description, parts)
                           VALUES (?, ?, ?, ?, ?, ?, ?, ?)''',
                        (p_path, file_created, file_modified, file_size, ec, mt, description, parts_json))
        self.conn.commit()

    def update_files_table(self, path, **fields):
        """Update metadata columns of the files table for a stored path.

        Column names are interpolated from the keyword argument names, so only
        pass trusted field names; values are bound as SQL parameters."""
        cur = self.conn.cursor()
        query = "UPDATE files SET "
        query += ', '.join(f"{k} = ?" for k in fields)
        query += " WHERE path = ?"
        cur.execute(query, (*fields.values(), path))
        self.conn.commit()

    def search(self, search_string):
        """Return stored paths matching an SQL LIKE pattern, e.g. '%.jpg' or '%'."""
        cur = self.conn.cursor()
        cur.execute('SELECT path FROM files WHERE path LIKE ?', (search_string,))
        return [row['path'] for row in cur.fetchall()]

    def delete(self, file_path):
        """Delete a stored file: its data blocks first, then its metadata row."""
        cur = self.conn.cursor()
        cur.execute('SELECT parts FROM files WHERE path=?', (file_path,))
        row = cur.fetchone()
        if row:
            for part in json.loads(row['parts']):
                cur.execute('DELETE FROM datas WHERE uuid=?', (part['uuid'],))
            cur.execute('DELETE FROM files WHERE path=?', (file_path,))
        self.conn.commit()
"""
q=FileSQL3("imgs.sql3")
for p in q.search("%"):
f=q.get(p)
tg=open("imgs/"+p,"wb")
tg.write(f.read())
tg.close()
"""