Skip to content
Toggle navigation
P
Projects
G
Groups
S
Snippets
Help
CIRCLE
/
storagedriver
This project
Loading...
Sign in
Toggle navigation
Go to a project
Project
Repository
Issues
2
Merge Requests
4
Wiki
Snippets
Members
Activity
Graph
Charts
Create a new issue
Commits
Issue Boards
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Commit
349cb3f5
authored
Jun 02, 2020
by
Szeberényi Imre
Browse files
Options
Browse Files
Download
Plain Diff
Merge branch 'export_import_disk' into 'master'
Implement export and import disk feature. See merge request
!17
parents
faf064b5
ea2cc1cb
Hide whitespace changes
Inline
Side-by-side
Showing
4 changed files
with
80 additions
and
15 deletions
+80
-15
.gitignore
+2
-0
disk.py
+57
-8
requirements/base.txt
+1
-0
storagedriver.py
+20
-7
No files found.
.gitignore
View file @
349cb3f5
*.pyc
*.pyc
*.swp
*.swp
.ropeproject
.ropeproject
.vscode
.idea
disk.py
View file @
349cb3f5
...
@@ -11,6 +11,7 @@ from time import sleep
...
@@ -11,6 +11,7 @@ from time import sleep
from
hashlib
import
md5
from
hashlib
import
md5
import
re
import
re
from
requests_toolbelt
import
MultipartEncoder
import
requests
import
requests
logger
=
logging
.
getLogger
(
__name__
)
logger
=
logging
.
getLogger
(
__name__
)
...
@@ -19,7 +20,7 @@ re_qemu_img = re.compile(r'(file format: (?P<format>(qcow2|raw))|'
...
@@ -19,7 +20,7 @@ re_qemu_img = re.compile(r'(file format: (?P<format>(qcow2|raw))|'
r'virtual size: \w+ \((?P<size>[0-9]+) bytes\)|'
r'virtual size: \w+ \((?P<size>[0-9]+) bytes\)|'
r'backing file: \S+ \(actual path: (?P<base>\S+)\))$'
)
r'backing file: \S+ \(actual path: (?P<base>\S+)\))$'
)
maximum_size
=
float
(
os
.
getenv
(
"DOWNLOAD_MAX_SIZE"
,
1024
*
1024
*
1024
*
10
))
maximum_size
=
float
(
os
.
getenv
(
"DOWNLOAD_MAX_SIZE"
,
1024
*
1024
*
1024
*
10
))
class
AbortException
(
Exception
):
class
AbortException
(
Exception
):
...
@@ -31,11 +32,10 @@ class FileTooBig(Exception):
...
@@ -31,11 +32,10 @@ class FileTooBig(Exception):
class
Disk
(
object
):
class
Disk
(
object
):
""" Storage driver DISK object.
''' Storage driver DISK object.
Handle qcow2, raw and iso images.
Handle qcow2, raw and iso images.
TYPES, CREATE_TYPES, SNAPSHOT_TYPES are hand managed restrictions.
TYPES, CREATE_TYPES, SNAPSHOT_TYPES are hand managed restrictions.
'''
"""
TYPES
=
[
'snapshot'
,
'normal'
]
TYPES
=
[
'snapshot'
,
'normal'
]
FORMATS
=
[
'qcow2'
,
'raw'
,
'iso'
]
FORMATS
=
[
'qcow2'
,
'raw'
,
'iso'
]
CREATE_FORMATS
=
[
'qcow2'
,
'raw'
]
CREATE_FORMATS
=
[
'qcow2'
,
'raw'
]
...
@@ -100,8 +100,8 @@ class Disk(object):
...
@@ -100,8 +100,8 @@ class Disk(object):
@classmethod
@classmethod
def
get_legacy
(
cls
,
dir
,
name
):
def
get_legacy
(
cls
,
dir
,
name
):
'''
Create disk from path
"""
Create disk from path
'''
"""
path
=
os
.
path
.
realpath
(
dir
+
'/'
+
name
)
path
=
os
.
path
.
realpath
(
dir
+
'/'
+
name
)
output
=
subprocess
.
check_output
([
'qemu-img'
,
'info'
,
path
])
output
=
subprocess
.
check_output
([
'qemu-img'
,
'info'
,
path
])
...
@@ -266,6 +266,55 @@ class Disk(object):
...
@@ -266,6 +266,55 @@ class Disk(object):
raise
Exception
(
"Invalid file format. Only qcow and "
raise
Exception
(
"Invalid file format. Only qcow and "
"iso files are allowed. Image from:
%
s"
%
url
)
"iso files are allowed. Image from:
%
s"
%
url
)
def import_disk(self, url):
    """Download a disk image from *url* and convert it to qcow2 in place.

    The image is fetched over HTTP, converted with ``qemu-img convert``
    to this disk's path, then validated.  On an invalid image the
    converted file is removed and an exception is raised.  Finally
    ``self.size`` is refreshed from the converted image.

    :param url: HTTP(S) location of the source image.
    :raises Exception: if the converted file is not a valid image.
    """
    r = requests.get(url, stream=True)
    downloaded_file = os.path.join(self.dir, url.split('/')[-1])
    try:
        # Stream to disk in chunks; r.content would buffer the whole
        # (potentially multi-gigabyte) image in memory at once.
        with open(downloaded_file, 'wb') as f:
            for chunk in r.iter_content(chunk_size=256 * 1024):
                if chunk:  # skip keep-alive chunks
                    f.write(chunk)
        cmdline = ['qemu-img', 'convert',
                   '-O', 'qcow2',
                   downloaded_file,
                   self.get_path()]
        subprocess.check_output(cmdline)
    finally:
        # Remove the temporary download even if the conversion fails.
        if os.path.exists(downloaded_file):
            os.unlink(downloaded_file)
    if not self.check_valid_image():
        os.unlink(self.get_path())
        raise Exception("Invalid file format.")
    self.size = Disk.get(self.dir, self.name).size
def export(self, format, exported_name, upload_link):
    """Convert this disk to *format* and POST it to *upload_link*.

    A temporary ``<disk path>.<format>`` file is produced with
    ``qemu-img convert``, streamed to *upload_link* as a multipart
    upload, and always removed afterwards.

    :param format: target image format accepted by qemu-img (e.g. raw).
    :param exported_name: base filename to present in the upload.
    :param upload_link: URL accepting the multipart POST.
    :raises Exception: if the upload does not answer HTTP 200.
    """
    exported_path = self.get_path() + '.' + format
    try:
        cmdline = ['qemu-img', 'convert',
                   '-O', format,
                   self.get_path(),
                   exported_path]
        subprocess.check_output(cmdline)
        with open(exported_path, 'rb') as exported_disk:
            # MultipartEncoder streams the open file instead of
            # loading the exported image into memory.
            m = MultipartEncoder(
                {'data': (exported_name + '.' + format, exported_disk)})
            response = requests.post(
                upload_link,
                data=m,
                headers={'Content-Type': m.content_type},
                params={'no_redirect': ''})
            if response.status_code != 200:
                raise Exception("Invalid response status code: %s"
                                % response.status_code)
    finally:
        # Always drop the temporary export — including the partial file
        # left behind when qemu-img convert itself fails.
        if os.path.exists(exported_path):
            os.unlink(exported_path)
def
extract_iso_from_zip
(
self
,
disk_path
):
def
extract_iso_from_zip
(
self
,
disk_path
):
with
ZipFile
(
disk_path
,
'r'
)
as
z
:
with
ZipFile
(
disk_path
,
'r'
)
as
z
:
isos
=
z
.
namelist
()
isos
=
z
.
namelist
()
...
@@ -285,8 +334,8 @@ class Disk(object):
...
@@ -285,8 +334,8 @@ class Disk(object):
disk_path
)
disk_path
)
def
snapshot
(
self
):
def
snapshot
(
self
):
'''
Creating qcow2 snapshot with base image.
"""
Creating qcow2 snapshot with base image.
'''
"""
# Check if snapshot type and qcow2 format match
# Check if snapshot type and qcow2 format match
if
self
.
type
!=
'snapshot'
:
if
self
.
type
!=
'snapshot'
:
raise
Exception
(
'Invalid type:
%
s'
%
self
.
type
)
raise
Exception
(
'Invalid type:
%
s'
%
self
.
type
)
...
...
requirements/base.txt
View file @
349cb3f5
celery==3.1.17
celery==3.1.17
requests==2.5.3
requests==2.5.3
requests-toolbelt==0.9.1
filemagic==1.6
filemagic==1.6
storagedriver.py
View file @
349cb3f5
...
@@ -42,6 +42,19 @@ class download(AbortableTask):
...
@@ -42,6 +42,19 @@ class download(AbortableTask):
@celery.task()
def import_disk(disk_desc, url):
    """Celery task: import the image at *url* into the described disk.

    :param disk_desc: serialized Disk description.
    :param url: HTTP(S) location of the source image.
    :return: size of the disk after the import.
    """
    imported = Disk.deserialize(disk_desc)
    imported.import_disk(url)
    return imported.size
@celery.task()
def export(disk_desc, format, exported_name, upload_link):
    """Celery task: export the described disk and upload it.

    :param disk_desc: serialized Disk description.
    :param format: target image format passed through to Disk.export.
    :param exported_name: base filename presented in the upload.
    :param upload_link: URL accepting the multipart POST.
    """
    exported = Disk.deserialize(disk_desc)
    exported.export(format, exported_name, upload_link)
@celery.task
()
def
delete
(
json_data
):
def
delete
(
json_data
):
disk
=
Disk
.
deserialize
(
json_data
)
disk
=
Disk
.
deserialize
(
json_data
)
disk
.
delete
()
disk
.
delete
()
...
@@ -79,7 +92,7 @@ def get(json_data):
...
@@ -79,7 +92,7 @@ def get(json_data):
@celery.task
()
@celery.task
()
def
get_storage_stat
(
path
):
def
get_storage_stat
(
path
):
''' Return free disk space available at path in bytes and percent.'''
""" Return free disk space available at path in bytes and percent."""
s
=
statvfs
(
path
)
s
=
statvfs
(
path
)
all_space
=
s
.
f_bsize
*
s
.
f_blocks
all_space
=
s
.
f_bsize
*
s
.
f_blocks
free_space
=
s
.
f_bavail
*
s
.
f_frsize
free_space
=
s
.
f_bavail
*
s
.
f_frsize
...
@@ -110,8 +123,8 @@ def get_file_statistics(datastore):
...
@@ -110,8 +123,8 @@ def get_file_statistics(datastore):
@celery.task
@celery.task
def
move_to_trash
(
datastore
,
disk_name
):
def
move_to_trash
(
datastore
,
disk_name
):
'''
Move path to the trash directory.
"""
Move path to the trash directory.
'''
"""
trash_path
=
path
.
join
(
datastore
,
trash_directory
)
trash_path
=
path
.
join
(
datastore
,
trash_directory
)
disk_path
=
path
.
join
(
datastore
,
disk_name
)
disk_path
=
path
.
join
(
datastore
,
disk_name
)
if
not
path
.
isdir
(
trash_path
):
if
not
path
.
isdir
(
trash_path
):
...
@@ -122,8 +135,8 @@ def move_to_trash(datastore, disk_name):
...
@@ -122,8 +135,8 @@ def move_to_trash(datastore, disk_name):
@celery.task
@celery.task
def
recover_from_trash
(
datastore
,
disk_name
):
def
recover_from_trash
(
datastore
,
disk_name
):
'''
Recover named disk from the trash directory.
"""
Recover named disk from the trash directory.
'''
"""
if
path
.
exists
(
path
.
join
(
datastore
,
disk_name
)):
if
path
.
exists
(
path
.
join
(
datastore
,
disk_name
)):
return
False
return
False
disk_path
=
path
.
join
(
datastore
,
trash_directory
,
disk_name
)
disk_path
=
path
.
join
(
datastore
,
trash_directory
,
disk_name
)
...
@@ -134,10 +147,10 @@ def recover_from_trash(datastore, disk_name):
...
@@ -134,10 +147,10 @@ def recover_from_trash(datastore, disk_name):
@celery.task
@celery.task
def
make_free_space
(
datastore
,
percent
=
10
):
def
make_free_space
(
datastore
,
percent
=
10
):
'''
Check for free space on datastore.
"""
Check for free space on datastore.
If free space is less than the given percent
If free space is less than the given percent
removes oldest files to satisfy the given requirement.
removes oldest files to satisfy the given requirement.
'''
"""
trash_path
=
path
.
join
(
datastore
,
trash_directory
)
trash_path
=
path
.
join
(
datastore
,
trash_directory
)
def
comp
(
filename
):
def
comp
(
filename
):
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment