我有 50 张尺寸为 1028x1028 的图像。我试图通过从 50 张图像中随机抽取一些补丁来制作一本字典。
这是我的代码=>
from os import listdir
from time import time
import matplotlib.pyplot as plt
import numpy as np
import scipy as sp
from sklearn.decomposition import MiniBatchDictionaryLearning
from sklearn.feature_extraction.image import extract_patches_2d
from sklearn.feature_extraction.image import reconstruct_from_patches_2d
from sklearn.utils.fixes import sp_version
from sklearn.datasets import load_sample_image
from scipy import ndimage
from skimage import color
from skimage import io
from PIL import Image
from sklearn.decomposition import SparseCoder
from sklearn.decomposition import sparse_encode
from skimage import data,restoration
from scipy.misc import imfilter, imread
from scipy.signal import convolve2d as conv2
import sys
from sklearn.feature_extraction import image
# Build a dictionary of image patches from grayscale images using
# MiniBatchDictionaryLearning.
#
# Pipeline: load every image in `resize/` as grayscale -> extract random
# patches from all images at once -> flatten each patch to a row vector ->
# learn a 100-atom dictionary.
x = []
path = 'resize/'
c = 0
for e in listdir(path):
    # Convert each image to a single-channel (grayscale) 2-D array.
    matrix = np.asarray(Image.open(path + e).convert('L'))
    x.append(matrix)
images = np.array(x)

# NOTE: renamed from `input`, which shadowed the builtin of the same name.
input_image = np.asarray(Image.open('H03.bmp').convert('L'))
height, width = input_image.shape

patchsize = (7, 14)
# PatchExtractor draws max_patches=10000 patches PER IMAGE, so with ~50
# images this yields ~500k patches. Keep them as float32: at float64 the
# flattened design matrix (~500k x 98) plus the randomized-SVD workspace
# inside fit() is what triggered the original MemoryError.
patches = image.PatchExtractor(patchsize, max_patches=10000,
                               random_state=10).transform(images)
print(patches.shape)

# Flatten each (7, 14) patch into a 98-dimensional row vector.
data = patches.reshape(patches.shape[0], -1).astype(np.float32)

n_iter = 1000
dico = MiniBatchDictionaryLearning(n_components=100, alpha=3, n_iter=n_iter)
# BUG FIX: fit_transform() returns the transformed ndarray, which has no
# `.components_` attribute. fit() returns the estimator itself, whose
# learned dictionary is exposed as `.components_` (shape (100, 98)).
V = dico.fit(data).components_
但在最后一行我收到 MemoryError。这是错误=>
(480000, 7, 14) Traceback (most recent call last): File "new.py", line 63, in
V = dico.fit_transform(data).components_ File "/usr/local/lib/python3.4/dist-packages/sklearn/base.py", line 494, in fit_transform
return self.fit(X, **fit_params).transform(X) File "/usr/local/lib/python3.4/dist-packages/sklearn/decomposition/dict_learning.py", line 1238, in fit
return_n_iter=True) File "/usr/local/lib/python3.4/dist-packages/sklearn/decomposition/dict_learning.py", line 677, in dict_learning_online
random_state=random_state) File "/usr/local/lib/python3.4/dist-packages/sklearn/utils/extmath.py", line 364, in randomized_svd
power_iteration_normalizer, random_state) File "/usr/local/lib/python3.4/dist-packages/sklearn/utils/extmath.py", line 258, in randomized_range_finder
Q, _ = linalg.lu(safe_sparse_dot(A, Q), permute_l=True) File "/usr/local/lib/python3.4/dist-packages/sklearn/utils/extmath.py", line 189, in safe_sparse_dot
return fast_dot(a, b) MemoryError
我不知道为什么会收到此错误?
最佳答案
正如评论中所讨论的。
MemoryError happens when you run out of memory basically. You don't have enough RAM. What is your system configuration?
OP回复:
I think you are right... if I take only a few images, around 10, then it works fine. Thanks for the help. –
基本上用户的 RAM 空间不足。
关于python - 使用 scikit learn 在字典学习中出现内存错误,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/42454204/