CIRCLE / cloud: commit 17a4af7f, authored 9 years ago by Czémán Arnold
storage: rework garbage collector logic, synchronize tasks with storagedriver
parent b02d4a32. Pipeline #132 passed in 0 seconds.
Showing 3 changed files, with 29 additions and 22 deletions (+29 / -22):

circle/storage/models.py                   +14 / -5
circle/storage/tasks/periodic_tasks.py     +11 / -8
circle/storage/tasks/storage_tasks.py       +4 / -9
circle/storage/models.py (view file @ 17a4af7f)

@@ -75,9 +75,11 @@ class DataStore(Model):
             raise WorkerNotFound()
 
     def get_deletable_disks(self):
-        return [disk.filename for disk in self.disk_set.filter(
-            destroyed__isnull=False) if disk.is_deletable]
+        deletables = [disk for disk in self.disk_set.filter(
+            destroyed__isnull=False) if disk.is_deletable]
+        deletables = sorted(deletables, key=lambda disk: disk.destroyed)
+        return [disk.filename for disk in deletables]
 
     @method_cache(30)
     def get_statistics(self, timeout=15):

@@ -475,9 +477,16 @@ class Disk(TimeStampedModel):
             'storage', priority='slow')
         logger.info("Image: %s at Datastore: %s recovered from trash." %
                     (self.filename, self.datastore.path))
-        storage_tasks.recover_from_trash.apply_async(
+        res = storage_tasks.exists.apply_async(
             args=[self.datastore.path, self.filename],
             queue=queue_name).get(timeout=timeout)
+        if res:
+            logger.info("Image: %s at Datastore: %s recovered." %
+                        (self.filename, self.datastore.path))
+        else:
+            logger.info("Image: %s at Datastore: %s not recovered." %
+                        (self.filename, self.datastore.path))
 
     def save_as(self, task=None, user=None, task_uuid=None, timeout=300):
         """Save VM as template.
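The second hunk makes Disk recovery verifiable: instead of only firing the recovery call, the model now asks the storagedriver whether the image is actually present, via the new storagedriver.exists task, and logs the outcome. A minimal sketch of what the driver-side handler might look like; the real storagedriver implementation is not part of this commit, and the Celery app wiring below is assumed:

# Hypothetical storagedriver-side handler for the 'exists' task referenced in
# models.py above; only the task name and signature come from this commit.
import os

from celery import Celery

celery = Celery()  # assumed: broker and result backend configured elsewhere


@celery.task(name='storagedriver.exists')
def exists(path, disk_name):
    # True if the disk image file is present in the datastore directory
    return os.path.isfile(os.path.join(path, disk_name))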
circle/storage/tasks/periodic_tasks.py (view file @ 17a4af7f)

@@ -24,7 +24,7 @@ logger = logging.getLogger(__name__)
 
 @celery.task
-def garbage_collector(timeout=15):
+def garbage_collector(timeout=15, percent=10):
     """ Garbage collector for disk images.
 
     If there is not enough free space on datastore (default 10%)

@@ -37,16 +37,19 @@ def garbage_collector(timeout=15):
         queue_name = ds.get_remote_queue_name('storage', priority='fast')
         files = set(storage_tasks.list_files.apply_async(
             args=[ds.path], queue=queue_name).get(timeout=timeout))
-        disks = set(ds.get_deletable_disks())
+        disks = ds.get_deletable_disks()
         queue_name = ds.get_remote_queue_name('storage', priority='slow')
-        for i in disks & files:
-            logger.info("Image: %s at Datastore: %s moved to trash folder." %
+        deletable_disks = [disk for disk in disks if disk in files]
+        for i in deletable_disks:
+            logger.info("Image: %s at Datastore: %s fetch for destroy." %
                         (i, ds.path))
-            storage_tasks.move_to_trash.apply_async(
-                args=[ds.path, i], queue=queue_name).get(timeout=timeout)
         try:
-            storage_tasks.make_free_space.apply_async(
-                args=[ds.path], queue=queue_name).get(timeout=timeout)
+            success = storage_tasks.make_free_space.apply_async(
+                args=[ds.path, deletable_disks, percent],
+                queue=queue_name).get(timeout=timeout)
+            if not success:
+                logger.warning("Has no deletable disk.")
         except Exception as e:
             logger.warning(str(e))
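Taken together, these hunks rework the garbage collector so that the Django side no longer trashes images one by one: it builds deletable_disks (the deletable Disk filenames that the storagedriver actually reports via list_files, already sorted oldest-destroyed first by DataStore.get_deletable_disks) and hands the whole list plus the free-space target percent to a single make_free_space call. Reassembled for readability, the task after this commit reads roughly as below; the datastore loop header and the surrounding imports sit outside the hunks and are assumed:

@celery.task
def garbage_collector(timeout=15, percent=10):
    """ Garbage collector for disk images. """
    for ds in DataStore.objects.all():  # assumed: iteration over datastores
        queue_name = ds.get_remote_queue_name('storage', priority='fast')
        files = set(storage_tasks.list_files.apply_async(
            args=[ds.path], queue=queue_name).get(timeout=timeout))
        disks = ds.get_deletable_disks()
        queue_name = ds.get_remote_queue_name('storage', priority='slow')
        deletable_disks = [disk for disk in disks if disk in files]
        for i in deletable_disks:
            logger.info("Image: %s at Datastore: %s fetch for destroy." %
                        (i, ds.path))
        try:
            # the storagedriver decides how many of the candidates to delete
            success = storage_tasks.make_free_space.apply_async(
                args=[ds.path, deletable_disks, percent],
                queue=queue_name).get(timeout=timeout)
            if not success:
                logger.warning("Has no deletable disk.")
        except Exception as e:
            logger.warning(str(e))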
circle/storage/tasks/storage_tasks.py (view file @ 17a4af7f)

@@ -63,18 +63,13 @@ def merge(src_disk_desc, dst_disk_desc):
     pass
 
 
-@celery.task(name='storagedriver.make_free_space')
-def make_free_space(datastore, percent):
+@celery.task(name='storagedriver.exists')
+def exists(path, disk_name):
     pass
 
 
-@celery.task(name='storagedriver.move_to_trash')
-def move_to_trash(datastore, disk_path):
-    pass
-
-
-@celery.task(name='storagedriver.recover_from_trash')
-def recover_from_trash(datastore, disk_path):
+@celery.task(name='storagedriver.make_free_space')
+def make_free_space(path, deletable_disks, percent):
     pass
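These functions are only Celery name stubs on the CIRCLE side; the bodies run in the separate storagedriver project. Going by the new signature and by the way periodic_tasks.py checks the return value, make_free_space presumably works through the candidate list until the datastore reaches the requested free-space percentage and reports whether it got there. A sketch under those assumptions; the free_space_percent helper and the deletion strategy are illustrative, not the actual storagedriver code:

import os

from celery import Celery

celery = Celery()  # assumed: configured by the storagedriver project


def free_space_percent(path):
    # free space of the filesystem holding 'path', in percent (Unix only)
    st = os.statvfs(path)
    return 100.0 * st.f_bavail / st.f_blocks


@celery.task(name='storagedriver.make_free_space')
def make_free_space(path, deletable_disks, percent=10):
    # delete candidates (oldest destroyed first, as sorted by the caller)
    # until the free-space target is reached; report success as a boolean
    for name in deletable_disks:
        if free_space_percent(path) >= percent:
            return True
        try:
            os.unlink(os.path.join(path, name))
        except OSError:
            continue  # already gone or not removable; try the next candidate
    return free_space_percent(path) >= percent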