Skip to content

Commit

Permalink
Fixed permanent unload of images after housekeeping (happened around 30 mins of runtime); Crawler does not crawl dot directories anymore (hidden folders)
Browse files Browse the repository at this point in the history
  • Loading branch information
pronopython committed Jun 28, 2024
1 parent 8be30cc commit ca191ac
Show file tree
Hide file tree
Showing 5 changed files with 29 additions and 8 deletions.
12 changes: 11 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -673,7 +673,7 @@ You can customize how the world is layed out via settings in rugivi.conf (not su
| Setting |Default| Description |
|---|---|--|
|crossshapegrow| `False`|`True` = Shape world like a cross. `False` = Shape world like a ball. Earlier versions of RuGiVi had a bug that resulted in a world shaped like a cross / plus sign instead of a round world.|
|nodiagonalgrow| `True`| `True` = Pictures of a set a placed only top/bottom/left/right, never diagonal.|
|nodiagonalgrow| `True`| `True` = Pictures of a set are placed only top/bottom/left/right, never diagonal.|
|organicgrow| `True`| `True` = Picture sets are more sponge-like|
|reachoutantmode| `True`| `True` = Sometimes sets are grown like a spike reaching out from the center|

Expand Down Expand Up @@ -716,6 +716,16 @@ Don't want to use GitHub? You can also contact me via email: pronopython@proton.

# Release Notes

## v0.5.1-alpha

### changed

- Crawler does not crawl dot directories anymore (hidden folders)

### fixed

- Fixed permanent unload of images after housekeeping (happened around 30 mins of runtime)

## v0.5.0-alpha

### added
Expand Down
18 changes: 15 additions & 3 deletions rugivi/crawlers/first_organic/organic_crawler_first_edition.py
Original file line number Diff line number Diff line change
Expand Up @@ -181,6 +181,10 @@ def crawler_loop(self) -> None:
sleep(1)
self.status = "paused"

if self.running == False:
self.crawler_loop_running = False
return

self.__pause_for_image_queue()

##############################################################################
Expand All @@ -190,6 +194,12 @@ def crawler_loop(self) -> None:
self.status = "crawling for next dir"

current_dir_absolute_path = Path(current_dir).absolute()

if os.path.basename(os.path.normpath(current_dir_absolute_path)).startswith("."):
# hidden dir (started with ".")
self.excludeDirList.append(str(current_dir_absolute_path))
continue

if str(current_dir_absolute_path) in self.dir_and_start_spot:
# already visited according to database
sleep(0.2)
Expand All @@ -203,7 +213,7 @@ def crawler_loop(self) -> None:
skip_dir = True
break
if skip_dir:
sleep(0.2)
sleep(0.0002)
continue

# populating dirAndStartSpot when this is crawler is run for the first time
Expand Down Expand Up @@ -494,7 +504,7 @@ def __find_empty_spots(self, current_dir, neededSpots):
# Looking for a start spot
# Start spots are selected out of border spots (these are always empty)
##############################################################################

self.status = "finding biome (start spot from border spots)"
# fallback: fill start_spot no matter what will happen
start_spot = random.choice(tuple(self.border_spots))

Expand Down Expand Up @@ -578,7 +588,7 @@ def __find_empty_spots(self, current_dir, neededSpots):
##############################################################################
# See if enough empty spots are next to the start spot
##############################################################################

self.status = "finding biome (gather empty spots [0/"+str(neededSpots)+"])"
stack_with_empty_spots_to_check: list = [start_spot]
# stack always holds neighbouring empty spots to be checked
# if they are not that far away so that spots are grouped together
Expand Down Expand Up @@ -674,6 +684,8 @@ def __find_empty_spots(self, current_dir, neededSpots):
found_enough_empty_spots = True
break

self.status = "finding biome (gather empty spots ["+str(len(found_empty_spots))+"/"+str(neededSpots)+"])"

# ... and now look for its neighbours
(start_spot_x_S, start_spot_y_S) = currentSpot

Expand Down
2 changes: 1 addition & 1 deletion rugivi/image_service/streamed_image.py
Original file line number Diff line number Diff line change
Expand Up @@ -93,7 +93,7 @@ def increment_age(self) -> None:

def unload_except_thumb(self) -> None:
if self.get_available_quality() >= StreamedImage.QUALITY_GRID:
self._availableQuality = StreamedImage.QUALITY_THUMB
self._available_quality = StreamedImage.QUALITY_THUMB

self._surfaces[1][StreamedImage.SURFACE_SURFACE] = None
self._surfaces[2][StreamedImage.SURFACE_SURFACE] = None
Expand Down
3 changes: 1 addition & 2 deletions rugivi/view.py
Original file line number Diff line number Diff line change
Expand Up @@ -69,8 +69,7 @@ def __init__(self, world: World, initial_height : float) -> None:

self.draw_grid = True

self.update_matrix_side_length = 4
self.update_matrix_side_length = 6
self.update_matrix_side_length = 3
for y in range(0, self.update_matrix_side_length):
for x in range(0, self.update_matrix_side_length):
self.update_matrix.append((x, y))
Expand Down
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@
from setuptools import setup

setup(name='rugivi',
version='0.5.0-alpha.post1',
version='0.5.1-alpha',
description='RuGiVi - Adult Media Landscape Browser',
long_description='RuGiVi enables you to fly over your image and video collection and view thousands of images and video frames at once. Zoom in and out from one image to small thumbnails with your mousewheel in seconds. All images are grouped as you have them on your disk and arranged in a huge landscape. RuGiVi can work with hundred thousand of images at once.',
url='https://github.com/pronopython/rugivi',
Expand Down

0 comments on commit ca191ac

Please sign in to comment.