Skip to content

Commit

Permalink
Make a distinction between read and write locks
Browse files Browse the repository at this point in the history
Allows multiple threads to access the same course files if they are just
reading them.
  • Loading branch information
lainets committed Aug 28, 2023
1 parent 01648a6 commit 493f99e
Show file tree
Hide file tree
Showing 3 changed files with 28 additions and 17 deletions.
2 changes: 1 addition & 1 deletion access/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -224,7 +224,7 @@ def aplus_json(request: HttpRequest, course_key: str) -> HttpResponse:

path, defaults_path, _ = CourseConfig.file_paths(course_key, source=ConfigSource.PUBLISH)

with FileLock(path):
with FileLock(path, write=True):
with open(defaults_path, "w") as f:
json.dump(exercise_defaults, f)

Expand Down
16 changes: 9 additions & 7 deletions builder/builder.py
Original file line number Diff line number Diff line change
Expand Up @@ -321,7 +321,7 @@ def store(perfmonitor: PerfMonitor, config: CourseConfig) -> bool:
# the lock is released or BUILD_FILELOCK_TIMEOUT seconds has passed (in which case
# the build fails). The likely situation for this blocking is that the copys_async function
# called from the publish function has the lock.
with FileLock(store_path, timeout=settings.BUILD_FILELOCK_TIMEOUT):
with FileLock(store_path, write=True, timeout=settings.BUILD_FILELOCK_TIMEOUT):
build_logger.info("File lock acquired.")

build_logger.info("Copying the built materials")
Expand Down Expand Up @@ -434,11 +434,12 @@ def publish(course_key: str) -> List[str]:
errors.append(f"Failed to load newly built course for this reason: {e}")
logger.warn(f"Failed to load newly built course for this reason: {e}")
else:
renames([
(store_path, prod_path),
(store_defaults_path, prod_defaults_path),
(store_version_path, prod_version_path),
])
with FileLock(prod_path, write=True):
renames([
(store_path, prod_path),
(store_defaults_path, prod_defaults_path),
(store_version_path, prod_version_path),
])

config.save_to_cache(ConfigSource.PUBLISH)
# Copy files back to store so that rsync has files to compare against.
Expand All @@ -449,7 +450,8 @@ def publish(course_key: str) -> List[str]:
(prod_defaults_path, store_defaults_path),
(prod_version_path, store_version_path),
],
lock_path=store_path
read_lock_path=prod_path,
write_lock_path=store_path,
)

# If loading the store version failed or was skipped, try loading from the publish directory
Expand Down
27 changes: 18 additions & 9 deletions util/files.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
Utility functions for exercise files.
'''
from contextlib import nullcontext
from contextlib import ExitStack
import fcntl
from pathlib import Path
import os
Expand Down Expand Up @@ -85,12 +85,21 @@ def rm_paths_async(paths: List[Union[str, Path]]) -> None:


@task()
def copys_async(pairs: List[Tuple[PathLike, PathLike]], *, lock_path: Optional[PathLike] = None) -> None:
def copys_async(
pairs: List[Tuple[PathLike, PathLike]],
*,
read_lock_path: Optional[PathLike] = None,
write_lock_path: Optional[PathLike] = None,
) -> None:
"""Copies a list of files and directories asynchronously.
Note that the copying might fail, and the caller won't know about it
due to the asynchronicity"""
with FileLock(lock_path) if lock_path is not None else nullcontext():
with ExitStack() as stack:
if write_lock_path is not None:
stack.enter_context(FileLock(write_lock_path, write=True))
if read_lock_path is not None:
stack.enter_context(FileLock(read_lock_path))
for src, dst in pairs:
if os.path.isdir(src):
copytree(src, dst)
Expand Down Expand Up @@ -305,31 +314,31 @@ class FileLock:
__enter__ may raise OSError for non-blocking access on a locked file
or TimeoutError if obtaining a lock takes too long.
"""
def __init__(self, path: PathLike, timeout: Optional[int] = None):
def __init__(self, path: PathLike, write: bool = False, timeout: Optional[int] = None):
self.path = os.fspath(path) + ".lock"
self.timeout = timeout
self.lock_flag = fcntl.LOCK_EX if write else fcntl.LOCK_SH

def __enter__(self):
self.lockfile = open(self.path, "w")

if self.timeout is None:
fcntl.flock(self.lockfile, fcntl.LOCK_EX)
fcntl.flock(self.lockfile, self.lock_flag)
else:
# we would use a signal to timeout but it can only be used on the main thread
e = _try_flock(self.lockfile, fcntl.LOCK_EX | fcntl.LOCK_NB)
e = _try_flock(self.lockfile, self.lock_flag | fcntl.LOCK_NB)
if e:
for _ in range(self.timeout):
time.sleep(1)
e = _try_flock(self.lockfile, fcntl.LOCK_EX | fcntl.LOCK_NB)
e = _try_flock(self.lockfile, self.lock_flag | fcntl.LOCK_NB)
if not e:
break
else:
raise e


return self.lockfile

def __exit__(self, etype: Optional[Type[Exception]], e: Optional[Exception], traceback: TracebackType):
def __exit__(self, etype: Optional[Type[BaseException]], e: Optional[BaseException], tb: Optional[TracebackType]):
try:
os.unlink(self.path)
except:
Expand Down

0 comments on commit 493f99e

Please sign in to comment.