Commit a228d7ae, authored Apr 27, 2016 by Czémán Arnold
storage, vm: remove redundant code and small fix
Parent: feae17a6
Showing 4 changed files with 21 additions and 20 deletions:

  circle/storage/models.py                 +9   -1
  circle/storage/tasks/periodic_tasks.py   +9  -16
  circle/storage/tasks/storage_tasks.py    +2   -2
  circle/vm/models/instance.py             +1   -1
circle/storage/models.py
@@ -139,6 +139,11 @@ class DataStore(Model):

    @method_cache(30)
    def get_orphan_disks(self, timeout=15):
        """Disk image files without Disk object in the database.

        Exclude cloud-xxxxxxxx.dump format images.

        :param timeout: Seconds before TimeOut exception
        :type timeout: int
        """
        queue_name = self.get_remote_queue_name('storage', "slow")
        files = set(storage_tasks.list_files.apply_async(
        ...
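The @method_cache(30) decorator seen above presumably memoizes the method's result for about 30 seconds, so repeated dashboard or periodic-task calls do not re-query the storage driver each time. The project's own implementation is not part of this diff; the following is only a generic sketch of such a decorator.

    # Generic sketch of a per-instance, time-limited method cache.
    # Not the project's circle implementation; names are illustrative.
    import time
    from functools import wraps


    def method_cache(seconds):
        def decorator(method):
            attr = '_cache_%s' % method.__name__

            @wraps(method)
            def wrapper(self, *args, **kwargs):
                cached = getattr(self, attr, None)
                if cached is not None and time.time() - cached[0] < seconds:
                    return cached[1]          # still fresh: reuse the value
                value = method(self, *args, **kwargs)
                setattr(self, attr, (time.time(), value))
                return value
            return wrapper
        return decorator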
@@ -155,6 +160,9 @@ class DataStore(Model):

    @method_cache(30)
    def get_missing_disks(self, timeout=15):
        """Disk objects without disk image files.

        :param timeout: Seconds before TimeOut exception
        :type timeout: int
        """
        queue_name = self.get_remote_queue_name('storage', "slow")
        files = set(storage_tasks.list_files.apply_async(
        ...
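Judging from the loops removed from circle/storage/tasks/periodic_tasks.py later in this commit, the two cached helpers above presumably compute a set difference between the file names reported by the storage driver and the Disk rows in the database. A sketch under that assumption, with ds standing for a DataStore instance (these are illustrative functions, not the project's exact method bodies):

    # storage_tasks refers to circle/storage/tasks/storage_tasks.py (below).
    def get_orphan_disks_sketch(ds, timeout=15):
        # Files present on the datastore that have no Disk row.
        queue_name = ds.get_remote_queue_name('storage', "slow")
        files = set(storage_tasks.list_files.apply_async(
            args=[ds.type, ds.path], queue=queue_name).get(timeout=timeout))
        disks = set(disk.filename for disk in ds.disk_set.all())
        return files - disks


    def get_missing_disks_sketch(ds, timeout=15):
        # Disk rows whose image file is missing from the datastore;
        # destroyed disks are ignored, as in the removed periodic task.
        queue_name = ds.get_remote_queue_name('storage', "slow")
        files = set(storage_tasks.list_files.apply_async(
            args=[ds.type, ds.path], queue=queue_name).get(timeout=timeout))
        disks = set(disk.filename
                    for disk in ds.disk_set.filter(destroyed__isnull=True))
        return disks - files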
@@ -599,7 +607,7 @@ class Disk(TimeStampedModel):
        """
        queue_name = self.datastore.get_remote_queue_name(
            'storage', priority='slow')
-       res = storage_tasks.is_exists.apply_async(
+       res = storage_tasks.exists.apply_async(
            args=[self.datastore.type,
                  self.datastore.path,
                  self.filename],
        ...
circle/storage/tasks/periodic_tasks.py
@@ -24,7 +24,7 @@ logger = logging.getLogger(__name__)

@celery.task
-def garbage_collector(timeout=15):
+def garbage_collector(timeout=15, percent=10):
    """ Garbage collector for disk images.

    If there is not enough free space on datastore (default 10%)
    ...
@@ -45,9 +45,11 @@ def garbage_collector(timeout=15):
            logger.info("Image: %s at Datastore: %s fetch for destroy."
                        % (i, ds.path))
        try:
-           storage_tasks.make_free_space.apply_async(
-               args=[ds.type, ds.path, deletable_disks],
+           success = storage_tasks.make_free_space.apply_async(
+               args=[ds.type, ds.path, deletable_disks, percent],
                queue=queue_name).get(timeout=timeout)
+           if not success:
+               logger.warning("Has no deletable disk.")
        except Exception as e:
            logger.warning(str(e))
        ...
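The call above now passes a target free-space percentage and expects a truthy success flag back. The storagedriver side is not part of this diff, so the contract it implies can only be sketched; the function below is a hypothetical driver-side body under those assumptions (the statvfs-based helper is illustrative, not the real driver code).

    import os


    def make_free_space(data_store_type, path, deletable_disks, percent=10):
        # Hypothetical storagedriver-side task body: delete candidate images
        # under `path` until at least `percent` of the filesystem is free.
        # Returns False when the candidates run out first, which makes the
        # caller above log "Has no deletable disk."
        def free_percent():
            st = os.statvfs(path)
            return 100.0 * st.f_bavail / st.f_blocks

        candidates = list(deletable_disks)
        while free_percent() < percent:
            if not candidates:
                return False
            os.remove(os.path.join(path, candidates.pop(0)))
        return True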
@@ -59,15 +61,11 @@ def list_orphan_disks(timeout=15):
    Exclude cloud-xxxxxxxx.dump format images.

    :param timeout: Seconds before TimeOut exception
-   :type timeoit: int
+   :type timeout: int
    """
    import re
    for ds in DataStore.objects.all():
-       queue_name = ds.get_remote_queue_name('storage', "slow")
-       files = set(storage_tasks.list_files.apply_async(
-           args=[ds.type, ds.path], queue=queue_name).get(timeout=timeout))
-       disks = set([disk.filename for disk in ds.disk_set.all()])
-       for i in files - disks:
+       for i in ds.get_orphan_disks(timeout=timeout):
            if not re.match('cloud-[0-9]*\.dump', i):
                logging.warning("Orphan disk: %s" % i)
    ...
@@ -77,14 +75,9 @@ def list_missing_disks(timeout=15):
    """List Disk objects without disk image files.

    :param timeout: Seconds before TimeOut exception
-   :type timeoit: int
+   :type timeout: int
    """
    for ds in DataStore.objects.all():
-       queue_name = ds.get_remote_queue_name('storage', "slow")
-       files = set(storage_tasks.list_files.apply_async(
-           args=[ds.type, ds.path], queue=queue_name).get(timeout=timeout))
-       disks = set([disk.filename
-                    for disk in ds.disk_set.filter(destroyed__isnull=True)])
-       for i in disks - files:
+       for i in ds.get_missing_disks(timeout=timeout):
            logging.critical("Image: %s is missing from %s datastore."
                             % (i, ds.path))
circle/storage/tasks/storage_tasks.py
@@ -63,8 +63,8 @@ def merge(src_disk_desc, dst_disk_desc):
    pass


-@celery.task(name='storagedriver.is_exists')
-def is_exists(data_store_type, path, disk_name):
+@celery.task(name='storagedriver.exists')
+def exists(data_store_type, path, disk_name):
    pass
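The pass bodies above presumably act only as local name bindings: the manager schedules these tasks by name and the real implementation runs in the separate storagedriver worker, so renaming is_exists to exists here only works together with a matching rename on the driver. A call site then looks like the one already updated in circle/storage/models.py (datastore and disk below are illustrative placeholder objects):

    # Illustrative dispatch of the renamed task; the boolean result assumes
    # the storagedriver's 'storagedriver.exists' task reports whether the
    # image file is present on the given datastore.
    queue_name = datastore.get_remote_queue_name('storage', priority='slow')
    present = storage_tasks.exists.apply_async(
        args=[datastore.type, datastore.path, disk.filename],
        queue=queue_name).get(timeout=15)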
circle/vm/models/instance.py
@@ -429,7 +429,7 @@ class Instance(AclBase, VirtualMachineDescModel, StatusModel, OperatedMixin,
        common_fields = ['name', 'description', 'num_cores', 'ram_size',
                         'max_ram_size', 'arch', 'priority', 'boot_menu',
                         'raw_data', 'lease', 'access_method', 'system',
-                        'has_agent']
+                        'has_agent', 'datastore']
        params = dict(template=template, owner=owner, pw=pwgen())
        params.update([(f, getattr(template, f)) for f in common_fields])
        params.update(kwargs)  # override defaults w/ user supplied values
        ...
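For clarity on the one-line change above: every name in common_fields is copied from the template into the new instance's parameters and can still be overridden by the caller, so adding 'datastore' means a freshly created Instance now inherits its template's datastore by default. A trimmed, illustrative rendering of that flow:

    # Illustrative only: a shortened version of the parameter assembly above.
    common_fields = ['name', 'ram_size', 'datastore']        # trimmed list

    params = dict(template=template, owner=owner)             # fixed values
    params.update([(f, getattr(template, f)) for f in common_fields])
    params.update(kwargs)   # caller-supplied values win over template defaults
    # With 'datastore' in the list, the new instance gets the template's
    # datastore unless datastore=... is passed explicitly by the caller.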