CIRCLE / storagedriver
Commit 2b4ce60c, authored Jan 26, 2021 by Szeberényi Imre
Merge branch 'export_progress' into 'master'

Export and import disk progress

See merge request !19

parents 8f4918e4 7af8d98d
Showing 2 changed files with 81 additions and 20 deletions (+81 -20)

disk.py          +59 -13
storagedriver.py +22 -7
disk.py @ 2b4ce60c
import shutil
import json
import os
import subprocess
import logging
...
@@ -13,7 +12,7 @@ from time import sleep
from hashlib import md5
import re
-from requests_toolbelt import MultipartEncoder
+from requests_toolbelt import MultipartEncoder, MultipartEncoderMonitor
import requests

logger = logging.getLogger(__name__)
...
@@ -268,18 +267,32 @@ class Disk(object):
            raise Exception("Invalid file format. Only qcow and "
                            "iso files are allowed. Image from: %s" % url)

-    def import_disk(self, url):
+    def import_disk(self, task, url, parent_id):
        r = requests.get(url, stream=True)
+        clen = int(r.headers.get('content-length'))
        downloaded_file = os.path.join(self.dir, url.split('/')[-1])
+        percent = 0
        try:
            with open(downloaded_file, 'wb') as f:
-                for chunk in r.iter_content(chunk_size=8192):
+                for chunk in r.iter_content(chunk_size=256 * 1024):
                    f.write(chunk)
+                    current_size = f.tell()
+                    new_percent = current_size * 100 / clen
+                    if task.is_aborted():
+                        raise AbortException()
+                    if new_percent > percent:
+                        percent = new_percent
+                        task.update_state(
+                            task_id=parent_id,
+                            state=task.AsyncResult(parent_id).state,
+                            meta={'percent': percent})
            with magic.Magic() as m:
                ftype = m.id_filename(downloaded_file)
            if 'qcow' in ftype.lower():
-                shutil.move(downloaded_file, self.get_path())
+                move(downloaded_file, self.get_path())
            else:
                cmdline = ['qemu-img',
                           'convert',
...
@@ -287,7 +300,17 @@ class Disk(object):
                           downloaded_file,
                           self.get_path()]
                subprocess.check_output(cmdline)
+        except AbortException:
+            os.unlink(downloaded_file)
+            if os.path.exists(self.get_path()):
+                os.unlink(self.get_path())
+            logger.info("Import of disk %s aborted" % self.name)
        except:
            os.unlink(downloaded_file)
            if os.path.exists(self.get_path()):
                os.unlink(self.get_path())
            raise
        else:
            os.unlink(downloaded_file)
            if not self.check_valid_image():
...
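The rewritten import_disk above streams the image in 256 KiB chunks and, after each write, turns the bytes written so far into a percentage of the Content-Length, pushes it into the parent task's metadata, and bails out as soon as the task is flagged as aborted. A minimal standalone sketch of that download-with-progress pattern, independent of the Disk class; download_with_progress, on_percent, and the example URL are illustrative names, not part of this repository:

import requests

CHUNK_SIZE = 256 * 1024  # same chunk size as the new import_disk


def download_with_progress(url, dest_path, on_percent, is_aborted=lambda: False):
    """Stream url to dest_path, calling on_percent(p) whenever the integer
    download percentage grows; stop early when is_aborted() is true."""
    r = requests.get(url, stream=True)
    clen = int(r.headers.get('content-length', 0))
    percent = 0
    with open(dest_path, 'wb') as f:
        for chunk in r.iter_content(chunk_size=CHUNK_SIZE):
            if is_aborted():
                raise RuntimeError("download aborted")
            f.write(chunk)
            if clen:
                new_percent = f.tell() * 100 // clen
                if new_percent > percent:
                    percent = new_percent
                    on_percent(percent)


# Illustrative usage with a placeholder URL:
# download_with_progress("https://example.com/disk.qcow2", "/tmp/disk.qcow2",
#                        on_percent=lambda p: print("%d%%" % p))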
@@ -296,22 +319,45 @@ class Disk(object):
            self.size = Disk.get(self.dir, self.name).size

-    def export(self, format, exported_name, upload_link):
-        exported_path = self.get_path() + '.' + format
+    def export(self, task, disk_format, exported_name, upload_link,
+               parent_id):
+        exported_path = self.get_path() + '.' + disk_format
        cmdline = ['qemu-img',
                   'convert',
-                   '-O', format,
+                   '-O', disk_format,
                   self.get_path(),
                   exported_path]
        subprocess.check_output(cmdline)
+        size = os.path.getsize(exported_path)
+        percent = [0]
+
+        def update_state(monitor):
+            new_percent = monitor.bytes_read * 100 / size
+            if task.is_aborted():
+                raise AbortException()
+            if new_percent > percent[0]:
+                percent[0] = new_percent
+                task.update_state(
+                    task_id=parent_id,
+                    state=task.AsyncResult(parent_id).state,
+                    meta={'percent': percent[0]})
+
        with open(exported_path, 'rb') as exported_disk:
            try:
-                m = MultipartEncoder(
-                    {'data': (exported_name + '.' + format,
-                              exported_disk)}
+                e = MultipartEncoder(
+                    {'data': (exported_name + '.' + disk_format,
+                              exported_disk,
+                              'application/octet-stream')}
+                )
+                m = MultipartEncoderMonitor(e, update_state)
                # Force the read function to read more than 8192 bytes,
                # which is a hardcoded value in httplib. This increases
                # the upload speed. See:
                # https://github.com/requests/toolbelt/issues/75
                m._read = m.read
                m.read = lambda _: m._read(1024 * 1024)
                response = requests.post(
                    upload_link,
                    data=m,
...
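On the export side, progress comes from requests_toolbelt: the MultipartEncoder body is wrapped in a MultipartEncoderMonitor whose callback receives the monitor on every read, so bytes_read can be translated into a percentage exactly as the update_state closure above does. A hedged standalone sketch of that upload-with-progress pattern; upload_with_progress and on_percent are illustrative names, and the multipart field name 'data' is taken from the diff:

import os

import requests
from requests_toolbelt import MultipartEncoder, MultipartEncoderMonitor


def upload_with_progress(path, upload_link, on_percent):
    """POST the file at path as a multipart upload, reporting integer
    percentages through on_percent as the body is read."""
    size = os.path.getsize(path)
    percent = [0]  # mutable cell, like percent = [0] in Disk.export

    def callback(monitor):
        new_percent = monitor.bytes_read * 100 // size
        if new_percent > percent[0]:
            percent[0] = new_percent
            on_percent(percent[0])

    with open(path, 'rb') as f:
        encoder = MultipartEncoder(
            {'data': (os.path.basename(path), f, 'application/octet-stream')})
        monitor = MultipartEncoderMonitor(encoder, callback)
        return requests.post(upload_link, data=monitor,
                             headers={'Content-Type': monitor.content_type})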
storagedriver.py @ 2b4ce60c

...
@@ -41,17 +41,32 @@ class download(AbortableTask):
            'checksum': disk.checksum,
        }


-@celery.task()
-def import_disk(disk_desc, url):
+class import_disk(AbortableTask):
+    time_limit = 18000
+
+    def run(self, **kwargs):
+        disk_desc = kwargs["disk_desc"]
+        url = kwargs["url"]
+        parent_id = kwargs["task"]
        disk = Disk.deserialize(disk_desc)
-        disk.import_disk(url)
-        return disk.size
+        disk.import_disk(self, url, parent_id)
+        return {"size": disk.size,
+                "checksum": disk.checksum}


-@celery.task()
-def export(disk_desc, format, exported_name, upload_link):
+class export_disk(AbortableTask):
+    time_limit = 18000
+
+    def run(self, **kwargs):
+        disk_desc = kwargs["disk_desc"]
+        disk_format = kwargs["disk_format"]
+        exported_name = kwargs["exported_name"]
+        upload_link = kwargs["upload_link"]
+        parent_id = kwargs["task"]
        disk = Disk.deserialize(disk_desc)
-        disk.export(format, exported_name, upload_link)
+        disk.export(self, disk_format, exported_name, upload_link,
+                    parent_id)


@celery.task()
...
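In storagedriver.py both operations become class-based AbortableTask subclasses that read their arguments from kwargs, including the parent task id under the "task" key, and report progress into that parent task's metadata. A hedged sketch of how a caller could dispatch, monitor, and abort such a task; the imports, the celery_app handle, and the polling loop are illustrative assumptions, not taken from this repository:

from time import sleep

from celery.contrib.abortable import AbortableAsyncResult

# Hypothetical imports: the real manager wires these up differently.
from storagedriver import import_disk, celery as celery_app


def run_import(disk_desc, url, parent_id):
    # Kwargs mirror what import_disk.run() pops from **kwargs in the diff,
    # including the parent task id under the "task" key.
    result = import_disk.apply_async(
        kwargs={"disk_desc": disk_desc, "url": url, "task": parent_id})
    try:
        while not result.ready():
            # The worker writes {'percent': ...} onto the *parent* task id,
            # so progress is read from that result, not from `result` itself.
            meta = celery_app.AsyncResult(parent_id).info
            if isinstance(meta, dict) and "percent" in meta:
                print("import at %s%%" % meta["percent"])
            sleep(1)
        return result.get()  # {"size": ..., "checksum": ...}
    except KeyboardInterrupt:
        # Flag the abortable task; the worker's is_aborted() check raises
        # AbortException and removes the partially downloaded file.
        AbortableAsyncResult(result.id).abort()
        raise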