R Under development (unstable) (2019-02-22 r76146) -- "Unsuffered Consequences"
Copyright (C) 2019 The R Foundation for Statistical Computing
Platform: x86_64-pc-linux-gnu (64-bit)

R is free software and comes with ABSOLUTELY NO WARRANTY.
You are welcome to redistribute it under certain conditions.
Type 'license()' or 'licence()' for distribution details.

  Natural language support but running in an English locale

R is a collaborative project with many contributors.
Type 'contributors()' for more information and
'citation()' on how to cite R or R packages in publications.

Type 'demo()' for some demos, 'help()' for on-line help, or
'help.start()' for an HTML browser interface to help.
Type 'q()' to quit R.

> pkgname <- "maxent"
> source(file.path(R.home("share"), "R", "examples-header.R"))
> options(warn = 1)
> library('maxent')
Loading required package: SparseM

Attaching package: ‘SparseM’

The following object is masked from ‘package:base’:

    backsolve

Loading required package: tm
Loading required package: NLP
> 
> base::assign(".oldSearch", base::search(), pos = 'CheckExEnv')
> base::assign(".old_wd", base::getwd(), pos = 'CheckExEnv')
> cleanEx()
> nameEx("NYTimes")
> ### * NYTimes
> 
> flush(stderr()); flush(stdout())
> 
> ### Name: NYTimes
> ### Title: a sample dataset containing labeled headlines from The New York
> ###   Times.
> ### Aliases: NYTimes
> ### Keywords: datasets
> 
> ### ** Examples
> 
> # READ THE CSV
> data <- read.csv(system.file("data/NYTimes.csv.gz",package="maxent"))
> # ALTERNATIVELY, USE THE data() FUNCTION
> data(NYTimes)
Warning in scan(file = file, what = what, sep = sep, quote = quote, dec = dec,  :
  EOF within quoted string
> 
> 
> 
> cleanEx()
> nameEx("USCongress")
> ### * USCongress
> 
> flush(stderr()); flush(stdout())
> 
> ### Name: USCongress
> ### Title: a sample dataset containing labeled bills from the United States
> ###   Congress.
> ### Aliases: USCongress
> ### Keywords: datasets
> 
> ### ** Examples
> 
> # READ THE CSV
> data <- read.csv(system.file("data/USCongress.csv.gz",package="maxent"))
> # ALTERNATIVELY, USE THE data() FUNCTION
> data(USCongress)
Warning in scan(file = file, what = what, sep = sep, quote = quote, dec = dec,  :
  EOF within quoted string
> 
> 
> 
> cleanEx()
> nameEx("as.compressed.matrix")
> ### * as.compressed.matrix
> 
> flush(stderr()); flush(stdout())
> 
> ### Name: as.compressed.matrix
> ### Title: converts a tm DocumentTermMatrix or TermDocumentMatrix into a
> ###   matrix.csr representation.
> ### Aliases: as.compressed.matrix
> ### Keywords: methods
> 
> ### ** Examples
> 
> # LOAD LIBRARY
> library(maxent)
> 
> # READ THE DATA, PREPARE THE CORPUS, and CREATE THE MATRIX
> data <- read.csv(system.file("data/NYTimes.csv.gz",package="maxent"))
> corpus <- Corpus(VectorSource(data$Title[1:150]))
> matrix <- DocumentTermMatrix(corpus)
> 
> # CREATE A MATRIX.CSR (SPARSEM) REPRESENTATION
> sparse <- as.compressed.matrix(matrix)
> 
> 
> 
> cleanEx()
> nameEx("load.model")
> ### * load.model
> 
> flush(stderr()); flush(stdout())
> 
> ### Name: load.model
> ### Title: loads a maximum entropy model from a file.
> ### Aliases: load.model > ### Keywords: methods > > ### ** Examples > > # LOAD LIBRARY > library(maxent) > > # READ THE DATA, PREPARE THE CORPUS, and CREATE THE MATRIX > data <- read.csv(system.file("data/NYTimes.csv.gz",package="maxent")) > corpus <- Corpus(VectorSource(data$Title[1:150])) > matrix <- DocumentTermMatrix(corpus) > > # TRAIN USING SPARSEM REPRESENTATION > sparse <- as.compressed.matrix(matrix) > model <- maxent(sparse[1:100,],as.factor(data$Topic.Code)[1:100]) > > save.model(model,"myModel") > saved_model <- load.model("myModel") > results <- predict(saved_model,sparse[101:150,]) > > > > cleanEx() > nameEx("maxent-class") > ### * maxent-class > > flush(stderr()); flush(stdout()) > > ### Name: maxent-class > ### Title: an S4 class containing the trained maximum entropy model. > ### Aliases: maxent-class > ### Keywords: classes > > ### ** Examples > > # LOAD LIBRARY > library(maxent) > > # READ THE DATA, PREPARE THE CORPUS, and CREATE THE MATRIX > data <- read.csv(system.file("data/NYTimes.csv.gz",package="maxent")) > corpus <- Corpus(VectorSource(data$Title[1:150])) > matrix <- DocumentTermMatrix(corpus) > > # TRAIN USING SPARSEM REPRESENTATION > sparse <- as.compressed.matrix(matrix) > model <- maxent(sparse[1:100,],as.factor(data$Topic.Code)[1:100]) > class(model) [1] "maxent" attr(,"package") [1] "maxent" > model@model [1] "5\t559\t1.97944\n5\t558\t1.97944\n5\t557\t1.97944\n5\t556\t1.97944\n5\t555\t1.97944\n19\t554\t1.8456\n19\t553\t1.8456\n19\t551\t1.8456\n19\t550\t1.8456\n19\t549\t1.8456\n6\t548\t1.40974\n6\t547\t1.40974\n6\t546\t1.40974\n6\t545\t1.40974\n6\t544\t1.40974\n2\t543\t1.94783\n2\t541\t1.94783\n2\t540\t1.94783\n2\t539\t1.94783\n10\t538\t1.82542\n10\t537\t1.82542\n10\t536\t1.82542\n10\t535\t1.82542\n6\t533\t1.45204\n6\t531\t1.45204\n6\t530\t1.45204\n6\t529\t1.45204\n6\t528\t1.45204\n20\t527\t1.51043\n20\t526\t1.51043\n20\t525\t1.51043\n2\t524\t2.04344\n2\t523\t2.04344\n6\t519\t1.95494\n6\t518\t1.95494\n6\t516\t1.95494\n6\t515\t1.95494\n5\t514\t1.52184\n5\t513\t1.52184\n20\t509\t1.61451\n20\t508\t1.61451\n20\t506\t1.61451\n20\t505\t1.61451\n13\t504\t2.65266\n13\t503\t2.65266\n3\t499\t2.54484\n24\t498\t1.33508\n6\t498\t1.33249\n24\t497\t1.43859\n24\t496\t1.43859\n24\t495\t1.43859\n24\t494\t1.43859\n24\t493\t1.43859\n24\t492\t1.43859\n24\t491\t1.43859\n19\t490\t1.41045\n19\t489\t1.41045\n19\t488\t1.41045\n19\t487\t1.41045\n16\t486\t1.65444\n16\t485\t1.65444\n16\t484\t1.65444\n16\t483\t1.65444\n16\t482\t1.65444\n16\t481\t1.65444\n19\t480\t1.32378\n19\t479\t1.32378\n19\t478\t1.32378\n19\t477\t1.32378\n19\t476\t1.32378\n19\t475\t1.32378\n19\t474\t1.32378\n19\t473\t1.32378\n20\t471\t1.96457\n20\t470\t1.96457\n20\t469\t1.96457\n20\t468\t1.96457\n20\t467\t1.89637\n20\t466\t1.89637\n20\t465\t1.89637\n20\t464\t1.89637\n20\t463\t1.89637\n28\t461\t4.38374\n28\t460\t4.38374\n16\t459\t1.69645\n16\t458\t1.69645\n16\t457\t1.69645\n19\t456\t0.948877\n16\t456\t1.60069\n16\t455\t1.69645\n16\t454\t1.69645\n20\t453\t1.09941\n20\t451\t1.09941\n20\t450\t1.09941\n20\t449\t1.09941\n20\t448\t1.09941\n20\t447\t1.09941\n20\t446\t1.09941\n20\t445\t1.09941\n20\t444\t1.09941\n16\t443\t3.57399\n30\t441\t1.30468\n30\t440\t1.30468\n30\t439\t1.30468\n30\t438\t1.30468\n30\t437\t1.30468\n30\t436\t1.30468\n30\t435\t1.35304\n30\t433\t1.35304\n30\t431\t1.35304\n30\t430\t2.65772\n30\t429\t2.65772\n19\t428\t1.67844\n19\t427\t1.67844\n19\t426\t1.67844\n19\t425\t1.67844\n19\t424\t1.67844\n20\t423\t0.996975\n20\t419\t0.996975\n5\t418\t1.94476\n5\t416\t1.94476\n5\t415\t1.94476\n19\t414\t0.999557\n5\t414\t
1.8595\n2\t413\t1.50884\n2\t409\t1.50884\n2\t408\t1.50884\n10\t406\t1.8456\n10\t405\t1.8456\n10\t404\t1.8456\n10\t403\t1.8456\n2\t521\t2.04344\n19\t399\t1.8456\n2\t520\t2.04344\n19\t398\t1.8456\n19\t397\t1.8456\n19\t396\t1.8456\n19\t395\t1.8456\n5\t394\t3.12451\n5\t393\t1.60268\n5\t392\t1.60268\n19\t390\t1.32128\n20\t511\t1.61451\n19\t389\t1.32128\n20\t510\t1.61451\n19\t388\t1.32128\n19\t387\t1.32128\n19\t386\t1.32128\n19\t385\t1.32128\n19\t384\t1.43868\n19\t383\t1.43868\n19\t382\t1.43868\n19\t380\t1.43868\n3\t501\t2.54484\n12\t379\t1.92802\n20\t500\t0.854226\n3\t500\t2.39067\n12\t378\t1.92802\n12\t377\t1.92802\n12\t376\t1.92802\n20\t375\t3.05806\n20\t374\t3.05806\n26\t373\t2.05284\n26\t370\t2.05284\n26\t369\t2.05284\n12\t368\t4.78911\n12\t367\t4.78911\n3\t366\t1.82108\n3\t365\t1.82108\n3\t364\t1.82108\n20\t363\t1.45049\n3\t363\t1.47515\n24\t363\t1.06447\n6\t363\t1.06169\n15\t360\t4.78911\n15\t359\t4.78911\n17\t358\t4.78911\n17\t357\t4.78911\n20\t356\t2.72686\n20\t355\t4.23729\n19\t354\t1.46214\n19\t353\t1.46214\n19\t350\t1.46214\n16\t349\t1.67349\n16\t348\t1.67349\n16\t347\t1.67349\n16\t346\t1.67349\n16\t345\t1.67349\n16\t344\t1.67349\n16\t343\t1.67349\n20\t340\t0.922174\n20\t339\t0.922174\n20\t338\t0.922174\n20\t337\t0.922174\n19\t166\t1.3917\n19\t163\t1.3917\n1\t52\t2.20003\n26\t160\t2.55926\n3\t361\t1.82108\n1\t94\t1.42292\n5\t157\t2.14793\n20\t280\t1.08745\n5\t156\t2.14793\n5\t155\t2.14793\n24\t153\t2.04707\n1\t51\t2.20003\n24\t150\t2.04707\n19\t351\t1.46214\n19\t93\t2.70802\n19\t146\t1.31754\n19\t145\t2.70072\n12\t143\t1.03195\n20\t143\t1.05745\n19\t143\t2.23537\n15\t143\t1.68694\n17\t143\t0.868914\n5\t143\t1.04916\n26\t143\t0.903142\n16\t143\t0.570726\n6\t143\t2.03518\n1\t50\t2.20003\n19\t136\t1.91206\n19\t133\t1.81618\n6\t133\t1.22984\n24\t126\t1.61451\n10\t125\t2.31183\n20\t124\t2.6432\n10\t124\t1.9616\n10\t123\t2.31183\n2\t115\t1.31621\n17\t114\t4.16666\n17\t113\t4.16666\n20\t111\t0.991094\n20\t105\t0.991094\n19\t103\t1.67899\n19\t101\t1.67899\n19\t147\t1.31754\n19\t270\t1.33277\n19\t99\t1.67899\n12\t137\t2.62173\n19\t260\t1.38318\n19\t98\t1.67899\n24\t127\t1.61451\n19\t250\t1.35353\n5\t391\t1.60268\n1\t97\t1.42292\n24\t190\t1.73526\n12\t240\t2.12342\n19\t381\t1.43868\n20\t96\t1.86799\n1\t96\t1.22331\n20\t180\t1.3917\n19\t230\t1.35924\n26\t371\t2.05284\n1\t95\t1.42292\n19\t170\t1.3917\n20\t341\t0.922174\n19\t92\t1.50785\n12\t140\t2.62173\n12\t3\t1.59579\n24\t130\t1.61451\n19\t165\t1.3917\n2\t120\t2.63242\n6\t517\t1.95494\n19\t89\t1.50785\n20\t507\t1.61451\n19\t88\t1.50785\n5\t159\t2.14793\n20\t282\t1.08745\n19\t149\t1.31754\n20\t21\t1.36917\n20\t19\t1.36917\n19\t272\t1.33277\n12\t139\t2.62173\n20\t20\t1.36917\n24\t129\t1.61451\n2\t119\t1.31621\n12\t2\t1.59579\n20\t109\t0.991094\n19\t328\t1.39257\n30\t318\t2.04824\n5\t417\t1.94476\n19\t79\t1.94138\n19\t305\t1.60579\n24\t128\t1.61451\n20\t61\t2.46737\n3\t83\t1.66567\n19\t251\t2.67481\n16\t442\t3.57399\n20\t36\t1.33679\n1\t36\t2.16984\n2\t36\t1.75272\n20\t421\t0.996975\n19\t299\t2.73772\n2\t118\t1.31621\n1\t60\t1.22547\n3\t82\t1.66567\n12\t241\t2.00369\n2\t241\t1.41629\n30\t432\t1.35304\n1\t35\t1.24919\n2\t411\t1.50884\n20\t289\t3.70763\n3\t289\t1.91307\n5\t289\t1.96054\n19\t205\t1.20017\n20\t108\t7.15211\n20\t81\t0.671437\n3\t81\t1.3971\n26\t81\t2.16868\n19\t231\t1.35924\n20\t422\t0.996975\n1\t34\t2.47467\n10\t401\t1.8456\n20\t279\t1.08745\n2\t412\t1.50884\n1\t33\t2.47467\n19\t269\t1.33277\n15\t67\t1.41396\n26\t372\t2.05284\n20\t29\t2.42943\n19\t29\t2.09796\n1\t29\t1.57139\n15\t29\t1.76405\n17\t29\t2.34934\n2\t29\t0.669745\n26\t29\t
2.38998\n30\t29\t1.93712\n28\t29\t2.76022\n19\t226\t1.35924\n3\t362\t1.67587\n16\t362\t1.5273\n19\t28\t1.31474\n6\t216\t1.41706\n19\t352\t1.46214\n19\t27\t1.31474\n19\t206\t1.20017\n20\t342\t0.922174\n19\t26\t2.92053\n20\t9\t1.81862\n3\t9\t1.66287\n19\t104\t1.35477\n5\t104\t1.78867\n12\t1\t1.59579\n19\t141\t1.31754\n15\t72\t1.41396\n26\t238\t2.22986\n19\t169\t1.3917\n20\t292\t1.01082\n26\t162\t2.55926\n20\t10\t2.10428\n19\t57\t1.34323\n1\t57\t1.11709\n24\t152\t1.55659\n7\t152\t3.14755\n1\t56\t2.6484\n19\t262\t1.38318\n20\t18\t1.36917\n2\t116\t1.31621\n19\t252\t1.35353\n20\t17\t1.145\n1\t17\t2.10889\n20\t106\t0.991094\n24\t132\t1.61451\n1\t54\t1.22547\n19\t135\t1.91206\n15\t69\t1.41396\n30\t317\t2.04824\n24\t154\t2.04707\n13\t502\t2.65266\n19\t42\t3.56796\n1\t42\t0.880876\n16\t42\t1.37453\n16\t181\t1.94333\n12\t76\t1.60501\n20\t76\t2.13819\n19\t76\t2.31536\n19\t400\t1.8456\n20\t278\t0.996002\n6\t278\t1.33605\n20\t31\t0.881475\n19\t31\t1.15241\n19\t249\t1.35353\n19\t100\t1.67899\n20\t65\t1.86916\n26\t65\t3.11725\n19\t168\t1.3917\n3\t87\t1.4249\n26\t87\t1.93584\n20\t291\t1.01082\n12\t242\t2.12342\n20\t16\t1.36917\n10\t534\t1.82542\n12\t8\t1.16908\n20\t8\t1.53011\n15\t8\t0.920242\n17\t8\t3.30532\n19\t142\t1.31754\n20\t55\t0.28558\n19\t55\t0.922779\n1\t55\t0.742426\n2\t55\t1.87772\n16\t182\t1.94333\n2\t542\t1.94783\n19\t46\t1.66514\n19\t232\t1.35924\n20\t15\t1.36917\n2\t117\t1.31621\n19\t49\t1.66514\n10\t402\t1.8456\n19\t32\t1.31474\n1\t259\t1.41376\n19\t59\t1.21055\n1\t59\t2.43583\n6\t217\t1.41706\n20\t110\t0.991094\n15\t66\t1.41396\n12\t192\t1.79999\n4\t192\t1.73285\n19\t552\t1.8456\n19\t47\t1.66514\n20\t107\t0.991094\n20\t48\t1.95785\n19\t48\t1.39621\n19\t30\t1.31474\n26\t239\t2.22986\n20\t64\t2.27615\n2\t64\t2.23609\n5\t158\t2.14793\n3\t86\t1.66567\n20\t281\t1.08745\n30\t434\t1.35304\n12\t7\t1.59579\n20\t112\t0.991094\n20\t62\t2.46737\n12\t138\t2.62173\n3\t84\t1.66567\n19\t261\t1.38318\n2\t522\t2.04344\n19\t44\t3.5772\n2\t407\t1.50884\n19\t78\t1.94138\n20\t420\t0.996975\n19\t298\t1.34515\n20\t11\t2.10428\n20\t172\t1.3917\n6\t532\t1.45204\n19\t45\t1.66514\n20\t452\t1.09941\n12\t37\t1.82438\n20\t37\t0.500401\n1\t37\t0.98358\n19\t306\t1.60579\n19\t148\t1.31754\n20\t63\t2.46737\n19\t85\t1.33656\n3\t85\t1.56766\n19\t271\t1.33277\n20\t462\t1.74622\n10\t462\t1.63142\n1\t38\t0.975877\n13\t38\t2.60298\n30\t316\t2.04824\n20\t472\t1.96457\n1\t39\t2.35737\n6\t39\t1.31624\n19\t326\t1.39257\n12\t4\t1.59579\n19\t134\t1.91206\n1\t40\t2.33866\n30\t40\t1.08065\n26\t161\t2.55926\n15\t74\t1.41396\n1\t258\t1.41376\n19\t144\t1.31754\n1\t41\t1.14806\n17\t41\t1.89869\n20\t268\t0.377941\n19\t268\t3.06578\n10\t268\t1.41443\n30\t268\t1.59036\n19\t164\t1.3917\n5\t512\t1.52184\n1\t43\t1.24919\n24\t191\t1.73526\n19\t77\t1.94138\n19\t410\t1.30503\n2\t410\t1.43945\n20\t288\t1.01082\n19\t102\t1.67899\n10\t122\t2.31183\n1\t53\t2.20003\n19\t75\t3.01305\n16\t75\t1.70163\n19\t171\t1.3917\n10\t121\t2.31183\n15\t70\t1.41396\n6\t218\t1.41706\n24\t131\t1.61451\n1\t71\t1.30964\n15\t71\t1.3201\n19\t228\t1.35924\n24\t151\t2.04707\n15\t73\t1.25631\n24\t73\t1.97554\n19\t248\t1.35353\n19\t167\t1.3917\n20\t290\t1.01082\n20\t173\t1.3917\n20\t174\t1.3917\n20\t175\t1.3917\n20\t176\t1.3917\n20\t177\t1.3917\n16\t183\t1.94333\n16\t184\t1.94333\n1\t185\t1.7734\n24\t185\t1.56375\n1\t186\t1.13626\n24\t186\t1.63991\n24\t187\t1.73526\n3\t193\t1.83764\n5\t193\t1.26972\n4\t193\t1.52461\n4\t194\t1.92334\n4\t195\t1.92334\n4\t196\t1.92334\n1\t197\t1.69718\n3\t197\t1.65289\n5\t197\t1.12633\n4\t197\t1.36883\n26\t200\t3.55891\n19\t201\t1.20017\n20\t12\t2.10
428\n19\t202\t1.20017\n19\t203\t1.20017\n19\t204\t1.20017\n1\t58\t1.22547\n19\t207\t1.20017\n20\t330\t2.77034\n19\t208\t1.20017\n19\t91\t1.50785\n20\t331\t2.77034\n19\t209\t1.20017\n19\t25\t1.56768\n16\t25\t3.4584\n20\t332\t2.77034\n19\t210\t1.20017\n19\t211\t1.20017\n20\t13\t2.10428\n7\t212\t3.51377\n7\t213\t3.51377\n6\t214\t1.41706\n6\t215\t2.8268\n20\t219\t0.907034\n6\t219\t1.36174\n6\t220\t1.41706\n3\t80\t1.66567\n17\t221\t1.93079\n20\t14\t1.36917\n17\t222\t1.93079\n17\t223\t1.93079\n17\t224\t1.93079\n19\t225\t0.92842\n17\t225\t1.84233\n19\t227\t1.35924\n19\t229\t1.35924\n19\t233\t1.35924\n12\t5\t1.59579\n19\t234\t1.35924\n19\t235\t1.35924\n26\t236\t2.22986\n26\t237\t2.22986\n12\t243\t2.12342\n12\t244\t2.12342\n19\t245\t1.35353\n19\t246\t1.35353\n19\t247\t1.35353\n1\t253\t1.41376\n1\t254\t1.41376\n1\t255\t1.41376\n1\t256\t1.41376\n1\t257\t1.41376\n19\t263\t1.38318\n19\t264\t1.38318\n19\t265\t1.38318\n19\t266\t1.33277\n19\t267\t1.33277\n20\t273\t1.28327\n24\t273\t2.10725\n24\t274\t2.20239\n24\t275\t2.20239\n24\t276\t2.20239\n24\t277\t2.20239\n20\t283\t1.08745\n20\t284\t1.08745\n20\t285\t0.504827\n5\t285\t3.0515\n20\t286\t1.01082\n20\t287\t1.01082\n20\t293\t1.01082\n19\t294\t1.34515\n19\t295\t1.34515\n19\t296\t1.34515\n19\t297\t1.34515\n20\t178\t1.3917\n19\t300\t1.34515\n20\t179\t1.3917\n20\t301\t2.1436\n20\t22\t2.46858\n20\t302\t0.934821\n20\t303\t0.934821\n19\t304\t1.60579\n15\t68\t1.41396\n19\t307\t1.60579\n20\t308\t1.20878\n20\t309\t2.13096\n24\t188\t1.73526\n20\t310\t1.20878\n24\t189\t1.73526\n20\t311\t1.20878\n19\t23\t1.31474\n20\t312\t1.20878\n20\t313\t0.928968\n5\t313\t1.4234\n30\t314\t2.04824\n30\t315\t2.04824\n19\t319\t1.30181\n1\t319\t1.83833\n4\t198\t1.92334\n1\t320\t2.0509\n19\t90\t1.50785\n26\t199\t3.55891\n1\t321\t2.0509\n19\t24\t1.31474\n1\t322\t2.0509\n19\t323\t1.39257\n19\t324\t1.39257\n19\t325\t1.39257\n19\t327\t1.39257\n19\t329\t1.39257\n20\t333\t1.39097\n15\t333\t1.82298\n16\t333\t3.17975\n12\t6\t1.59579\n15\t334\t2.93404\n15\t335\t2.93404\n15\t336\t2.93404\n" > model@weights Weight Label Feature 1 1.98 5 559 2 1.98 5 558 3 1.98 5 557 4 1.98 5 556 5 1.98 5 555 6 1.85 19 554 7 1.85 19 553 8 1.85 19 551 9 1.85 19 550 10 1.85 19 549 11 1.41 6 548 12 1.41 6 547 13 1.41 6 546 14 1.41 6 545 15 1.41 6 544 16 1.95 2 543 17 1.95 2 541 18 1.95 2 540 19 1.95 2 539 20 1.83 10 538 21 1.83 10 537 22 1.83 10 536 23 1.83 10 535 24 1.45 6 533 25 1.45 6 531 26 1.45 6 530 27 1.45 6 529 28 1.45 6 528 29 1.51 20 527 30 1.51 20 526 31 1.51 20 525 32 2.04 2 524 33 2.04 2 523 34 1.95 6 519 35 1.95 6 518 36 1.95 6 516 37 1.95 6 515 38 1.52 5 514 39 1.52 5 513 40 1.61 20 509 41 1.61 20 508 42 1.61 20 506 43 1.61 20 505 44 2.65 13 504 45 2.65 13 503 46 2.54 3 499 47 1.34 24 498 48 1.33 6 498 49 1.44 24 497 50 1.44 24 496 51 1.44 24 495 52 1.44 24 494 53 1.44 24 493 54 1.44 24 492 55 1.44 24 491 56 1.41 19 490 57 1.41 19 489 58 1.41 19 488 59 1.41 19 487 60 1.65 16 486 61 1.65 16 485 62 1.65 16 484 63 1.65 16 483 64 1.65 16 482 65 1.65 16 481 66 1.32 19 480 67 1.32 19 479 68 1.32 19 478 69 1.32 19 477 70 1.32 19 476 71 1.32 19 475 72 1.32 19 474 73 1.32 19 473 74 1.96 20 471 75 1.96 20 470 76 1.96 20 469 77 1.96 20 468 78 1.9 20 467 79 1.9 20 466 80 1.9 20 465 81 1.9 20 464 82 1.9 20 463 83 4.38 28 461 84 4.38 28 460 85 1.7 16 459 86 1.7 16 458 87 1.7 16 457 88 0.949 19 456 89 1.6 16 456 90 1.7 16 455 91 1.7 16 454 92 1.1 20 453 93 1.1 20 451 94 1.1 20 450 95 1.1 20 449 96 1.1 20 448 97 1.1 20 447 98 1.1 20 446 99 1.1 20 445 100 1.1 20 444 101 3.57 16 443 102 1.3 30 441 103 1.3 30 440 104 1.3 
30 439 105 1.3 30 438 106 1.3 30 437 107 1.3 30 436 108 1.35 30 435 109 1.35 30 433 110 1.35 30 431 111 2.66 30 430 112 2.66 30 429 113 1.68 19 428 114 1.68 19 427 115 1.68 19 426 116 1.68 19 425 117 1.68 19 424 118 0.997 20 423 119 0.997 20 419 120 1.94 5 418 121 1.94 5 416 122 1.94 5 415 123 1 19 414 124 1.86 5 414 125 1.51 2 413 126 1.51 2 409 127 1.51 2 408 128 1.85 10 406 129 1.85 10 405 130 1.85 10 404 131 1.85 10 403 132 2.04 2 521 133 1.85 19 399 134 2.04 2 520 135 1.85 19 398 136 1.85 19 397 137 1.85 19 396 138 1.85 19 395 139 3.12 5 394 140 1.6 5 393 141 1.6 5 392 142 1.32 19 390 143 1.61 20 511 144 1.32 19 389 145 1.61 20 510 146 1.32 19 388 147 1.32 19 387 148 1.32 19 386 149 1.32 19 385 150 1.44 19 384 151 1.44 19 383 152 1.44 19 382 153 1.44 19 380 154 2.54 3 501 155 1.93 12 379 156 0.854 20 500 157 2.39 3 500 158 1.93 12 378 159 1.93 12 377 160 1.93 12 376 161 3.06 20 375 162 3.06 20 374 163 2.05 26 373 164 2.05 26 370 165 2.05 26 369 166 4.79 12 368 167 4.79 12 367 168 1.82 3 366 169 1.82 3 365 170 1.82 3 364 171 1.45 20 363 172 1.48 3 363 173 1.06 24 363 174 1.06 6 363 175 4.79 15 360 176 4.79 15 359 177 4.79 17 358 178 4.79 17 357 179 2.73 20 356 180 4.24 20 355 181 1.46 19 354 182 1.46 19 353 183 1.46 19 350 184 1.67 16 349 185 1.67 16 348 186 1.67 16 347 187 1.67 16 346 188 1.67 16 345 189 1.67 16 344 190 1.67 16 343 191 0.922 20 340 192 0.922 20 339 193 0.922 20 338 194 0.922 20 337 195 1.39 19 166 196 1.39 19 163 197 2.2 1 52 198 2.56 26 160 199 1.82 3 361 200 1.42 1 94 201 2.15 5 157 202 1.09 20 280 203 2.15 5 156 204 2.15 5 155 205 2.05 24 153 206 2.2 1 51 207 2.05 24 150 208 1.46 19 351 209 2.71 19 93 210 1.32 19 146 211 2.7 19 145 212 1.03 12 143 213 1.06 20 143 214 2.24 19 143 215 1.69 15 143 216 0.869 17 143 217 1.05 5 143 218 0.903 26 143 219 0.571 16 143 220 2.04 6 143 221 2.2 1 50 222 1.91 19 136 223 1.82 19 133 224 1.23 6 133 225 1.61 24 126 226 2.31 10 125 227 2.64 20 124 228 1.96 10 124 229 2.31 10 123 230 1.32 2 115 231 4.17 17 114 232 4.17 17 113 233 0.991 20 111 234 0.991 20 105 235 1.68 19 103 236 1.68 19 101 237 1.32 19 147 238 1.33 19 270 239 1.68 19 99 240 2.62 12 137 241 1.38 19 260 242 1.68 19 98 243 1.61 24 127 244 1.35 19 250 245 1.6 5 391 246 1.42 1 97 247 1.74 24 190 248 2.12 12 240 249 1.44 19 381 250 1.87 20 96 251 1.22 1 96 252 1.39 20 180 253 1.36 19 230 254 2.05 26 371 255 1.42 1 95 256 1.39 19 170 257 0.922 20 341 258 1.51 19 92 259 2.62 12 140 260 1.6 12 3 261 1.61 24 130 262 1.39 19 165 263 2.63 2 120 264 1.95 6 517 265 1.51 19 89 266 1.61 20 507 267 1.51 19 88 268 2.15 5 159 269 1.09 20 282 270 1.32 19 149 271 1.37 20 21 272 1.37 20 19 273 1.33 19 272 274 2.62 12 139 275 1.37 20 20 276 1.61 24 129 277 1.32 2 119 278 1.6 12 2 279 0.991 20 109 280 1.39 19 328 281 2.05 30 318 282 1.94 5 417 283 1.94 19 79 284 1.61 19 305 285 1.61 24 128 286 2.47 20 61 287 1.67 3 83 288 2.67 19 251 289 3.57 16 442 290 1.34 20 36 291 2.17 1 36 292 1.75 2 36 293 0.997 20 421 294 2.74 19 299 295 1.32 2 118 296 1.23 1 60 297 1.67 3 82 298 2 12 241 299 1.42 2 241 300 1.35 30 432 301 1.25 1 35 302 1.51 2 411 303 3.71 20 289 304 1.91 3 289 305 1.96 5 289 306 1.2 19 205 307 7.15 20 108 308 0.671 20 81 309 1.4 3 81 310 2.17 26 81 311 1.36 19 231 312 0.997 20 422 313 2.47 1 34 314 1.85 10 401 315 1.09 20 279 316 1.51 2 412 317 2.47 1 33 318 1.33 19 269 319 1.41 15 67 320 2.05 26 372 321 2.43 20 29 322 2.1 19 29 323 1.57 1 29 324 1.76 15 29 325 2.35 17 29 326 0.67 2 29 327 2.39 26 29 328 1.94 30 29 329 2.76 28 29 330 1.36 19 226 331 1.68 3 362 332 1.53 16 362 333 
1.31 19 28 334 1.42 6 216 335 1.46 19 352 336 1.31 19 27 337 1.2 19 206 338 0.922 20 342 339 2.92 19 26 340 1.82 20 9 341 1.66 3 9 342 1.35 19 104 343 1.79 5 104 344 1.6 12 1 345 1.32 19 141 346 1.41 15 72 347 2.23 26 238 348 1.39 19 169 349 1.01 20 292 350 2.56 26 162 351 2.1 20 10 352 1.34 19 57 353 1.12 1 57 354 1.56 24 152 355 3.15 7 152 356 2.65 1 56 357 1.38 19 262 358 1.37 20 18 359 1.32 2 116 360 1.35 19 252 361 1.15 20 17 362 2.11 1 17 363 0.991 20 106 364 1.61 24 132 365 1.23 1 54 366 1.91 19 135 367 1.41 15 69 368 2.05 30 317 369 2.05 24 154 370 2.65 13 502 371 3.57 19 42 372 0.881 1 42 373 1.37 16 42 374 1.94 16 181 375 1.61 12 76 376 2.14 20 76 377 2.32 19 76 378 1.85 19 400 379 0.996 20 278 380 1.34 6 278 381 0.881 20 31 382 1.15 19 31 383 1.35 19 249 384 1.68 19 100 385 1.87 20 65 386 3.12 26 65 387 1.39 19 168 388 1.42 3 87 389 1.94 26 87 390 1.01 20 291 391 2.12 12 242 392 1.37 20 16 393 1.83 10 534 394 1.17 12 8 395 1.53 20 8 396 0.92 15 8 397 3.31 17 8 398 1.32 19 142 399 0.286 20 55 400 0.923 19 55 401 0.742 1 55 402 1.88 2 55 403 1.94 16 182 404 1.95 2 542 405 1.67 19 46 406 1.36 19 232 407 1.37 20 15 408 1.32 2 117 409 1.67 19 49 410 1.85 10 402 411 1.31 19 32 412 1.41 1 259 413 1.21 19 59 414 2.44 1 59 415 1.42 6 217 416 0.991 20 110 417 1.41 15 66 418 1.8 12 192 419 1.73 4 192 420 1.85 19 552 421 1.67 19 47 422 0.991 20 107 423 1.96 20 48 424 1.4 19 48 425 1.31 19 30 426 2.23 26 239 427 2.28 20 64 428 2.24 2 64 429 2.15 5 158 430 1.67 3 86 431 1.09 20 281 432 1.35 30 434 433 1.6 12 7 434 0.991 20 112 435 2.47 20 62 436 2.62 12 138 437 1.67 3 84 438 1.38 19 261 439 2.04 2 522 440 3.58 19 44 441 1.51 2 407 442 1.94 19 78 443 0.997 20 420 444 1.35 19 298 445 2.1 20 11 446 1.39 20 172 447 1.45 6 532 448 1.67 19 45 449 1.1 20 452 450 1.82 12 37 451 0.5 20 37 452 0.984 1 37 453 1.61 19 306 454 1.32 19 148 455 2.47 20 63 456 1.34 19 85 457 1.57 3 85 458 1.33 19 271 459 1.75 20 462 460 1.63 10 462 461 0.976 1 38 462 2.6 13 38 463 2.05 30 316 464 1.96 20 472 465 2.36 1 39 466 1.32 6 39 467 1.39 19 326 468 1.6 12 4 469 1.91 19 134 470 2.34 1 40 471 1.08 30 40 472 2.56 26 161 473 1.41 15 74 474 1.41 1 258 475 1.32 19 144 476 1.15 1 41 477 1.9 17 41 478 0.378 20 268 479 3.07 19 268 480 1.41 10 268 481 1.59 30 268 482 1.39 19 164 483 1.52 5 512 484 1.25 1 43 485 1.74 24 191 486 1.94 19 77 487 1.31 19 410 488 1.44 2 410 489 1.01 20 288 490 1.68 19 102 491 2.31 10 122 492 2.2 1 53 493 3.01 19 75 494 1.7 16 75 495 1.39 19 171 496 2.31 10 121 497 1.41 15 70 498 1.42 6 218 499 1.61 24 131 500 1.31 1 71 501 1.32 15 71 502 1.36 19 228 503 2.05 24 151 504 1.26 15 73 505 1.98 24 73 506 1.35 19 248 507 1.39 19 167 508 1.01 20 290 509 1.39 20 173 510 1.39 20 174 511 1.39 20 175 512 1.39 20 176 513 1.39 20 177 514 1.94 16 183 515 1.94 16 184 516 1.77 1 185 517 1.56 24 185 518 1.14 1 186 519 1.64 24 186 520 1.74 24 187 521 1.84 3 193 522 1.27 5 193 523 1.52 4 193 524 1.92 4 194 525 1.92 4 195 526 1.92 4 196 527 1.7 1 197 528 1.65 3 197 529 1.13 5 197 530 1.37 4 197 531 3.56 26 200 532 1.2 19 201 533 2.1 20 12 534 1.2 19 202 535 1.2 19 203 536 1.2 19 204 537 1.23 1 58 538 1.2 19 207 539 2.77 20 330 540 1.2 19 208 541 1.51 19 91 542 2.77 20 331 543 1.2 19 209 544 1.57 19 25 545 3.46 16 25 546 2.77 20 332 547 1.2 19 210 548 1.2 19 211 549 2.1 20 13 550 3.51 7 212 551 3.51 7 213 552 1.42 6 214 553 2.83 6 215 554 0.907 20 219 555 1.36 6 219 556 1.42 6 220 557 1.67 3 80 558 1.93 17 221 559 1.37 20 14 560 1.93 17 222 561 1.93 17 223 562 1.93 17 224 563 0.928 19 225 564 1.84 17 225 565 1.36 19 227 
566 1.36 19 229 567 1.36 19 233 568 1.6 12 5 569 1.36 19 234 570 1.36 19 235 571 2.23 26 236 572 2.23 26 237 573 2.12 12 243 574 2.12 12 244 575 1.35 19 245 576 1.35 19 246 577 1.35 19 247 578 1.41 1 253 579 1.41 1 254 580 1.41 1 255 581 1.41 1 256 582 1.41 1 257 583 1.38 19 263 584 1.38 19 264 585 1.38 19 265 586 1.33 19 266 587 1.33 19 267 588 1.28 20 273 589 2.11 24 273 590 2.2 24 274 591 2.2 24 275 592 2.2 24 276 593 2.2 24 277 594 1.09 20 283 595 1.09 20 284 596 0.505 20 285 597 3.05 5 285 598 1.01 20 286 599 1.01 20 287 600 1.01 20 293 601 1.35 19 294 602 1.35 19 295 603 1.35 19 296 604 1.35 19 297 605 1.39 20 178 606 1.35 19 300 607 1.39 20 179 608 2.14 20 301 609 2.47 20 22 610 0.935 20 302 611 0.935 20 303 612 1.61 19 304 613 1.41 15 68 614 1.61 19 307 615 1.21 20 308 616 2.13 20 309 617 1.74 24 188 618 1.21 20 310 619 1.74 24 189 620 1.21 20 311 621 1.31 19 23 622 1.21 20 312 623 0.929 20 313 624 1.42 5 313 625 2.05 30 314 626 2.05 30 315 627 1.3 19 319 628 1.84 1 319 629 1.92 4 198 630 2.05 1 320 631 1.51 19 90 632 3.56 26 199 633 2.05 1 321 634 1.31 19 24 635 2.05 1 322 636 1.39 19 323 637 1.39 19 324 638 1.39 19 325 639 1.39 19 327 640 1.39 19 329 641 1.39 20 333 642 1.82 15 333 643 3.18 16 333 644 1.6 12 6 645 2.93 15 334 646 2.93 15 335 647 2.93 15 336 > > > > cleanEx() > nameEx("maxent-package") > ### * maxent-package > > flush(stderr()); flush(stdout()) > > ### Name: maxent-package > ### Title: Low-memory Multinomial Logistic Regression with Support for Text > ### Classification > ### Aliases: maxent-package > ### Keywords: package > > ### ** Examples > > # LOAD LIBRARY > library(maxent) > > # READ THE DATA, PREPARE THE CORPUS, and CREATE THE MATRIX > data <- read.csv(system.file("data/NYTimes.csv.gz",package="maxent")) > corpus <- Corpus(VectorSource(data$Title[1:150])) > matrix <- DocumentTermMatrix(corpus) > > # TRAIN/PREDICT USING SPARSEM REPRESENTATION > sparse <- as.compressed.matrix(matrix) > model <- maxent(sparse[1:100,],data$Topic.Code[1:100]) > results <- predict(model,sparse[101:150,]) > > > > cleanEx() > nameEx("maxent") > ### * maxent > > flush(stderr()); flush(stdout()) > > ### Name: maxent > ### Title: trains a maximum entropy model given a training matrix and a > ### vector or factor of labels. 
> ### Aliases: maxent
> ### Keywords: methods
> 
> ### ** Examples
> 
> # LOAD LIBRARY
> library(maxent)
> 
> # READ THE DATA, PREPARE THE CORPUS, and CREATE THE MATRIX
> data <- read.csv(system.file("data/NYTimes.csv.gz",package="maxent"))
> corpus <- Corpus(VectorSource(data$Title[1:150]))
> matrix <- DocumentTermMatrix(corpus)
> 
> # TRAIN USING SPARSEM REPRESENTATION
> sparse <- as.compressed.matrix(matrix)
> model <- maxent(sparse[1:100,],as.factor(data$Topic.Code)[1:100])
> 
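## Sketch (not executed in this session): one way to gauge the fit above is to
## predict the 50 held-out headlines and compare them with the true Topic.Code
## values. Column 1 of the predict() result is taken as the predicted label,
## following the new[,1] usage later in this log; the accuracy computation is
## an assumed illustration, not output recorded here.
results <- predict(model, sparse[101:150,])
mean(results[,1] == as.character(data$Topic.Code[101:150]))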
> # A DIFFERENT EXAMPLE (taken from package e1071)
> # CREATE DATA
> x <- seq(0.1, 5, by = 0.05)
> y <- log(x) + rnorm(x, sd = 0.2)
> 
> # ESTIMATE MODEL AND PREDICT INPUT VALUES
> m <- maxent(x, y)
maxent.h:213:17: runtime error: left shift of 926431026 by 4 places cannot be represented in type 'int'
    #0 0x7f0d0922b3db in ME_Model::hashfun_str::operator()(std::__cxx11::basic_string, std::allocator > const&) const /data/gannet/ripley/R/packages/tests-gcc-SAN/maxent/src/maxent.h:213
    #1 0x7f0d0922b3db in std::__detail::_Hash_code_base, std::allocator >, std::pair, std::allocator > const, int>, std::__detail::_Select1st, ME_Model::hashfun_str, std::__detail::_Mod_range_hashing, std::__detail::_Default_ranged_hash, true>::_M_hash_code(std::__cxx11::basic_string, std::allocator > const&) const /usr/include/c++/8/bits/hashtable_policy.h:1387
    #2 0x7f0d0922b3db in std::_Hashtable, std::allocator >, std::pair, std::allocator > const, int>, std::allocator, std::allocator > const, int> >, std::__detail::_Select1st, std::equal_to, std::allocator > >, ME_Model::hashfun_str, std::__detail::_Mod_range_hashing, std::__detail::_Default_ranged_hash, std::__detail::_Prime_rehash_policy, std::__detail::_Hashtable_traits >::find(std::__cxx11::basic_string, std::allocator > const&) /usr/include/c++/8/bits/hashtable.h:1439
    #3 0x7f0d091cf657 in std::unordered_map, std::allocator >, int, ME_Model::hashfun_str, std::equal_to, std::allocator > >, std::allocator, std::allocator > const, int> > >::find(std::__cxx11::basic_string, std::allocator > const&) /usr/include/c++/8/bits/unordered_map.h:921
    #4 0x7f0d091cf657 in ME_Model::StringBag::Put(std::__cxx11::basic_string, std::allocator > const&) /data/gannet/ripley/R/packages/tests-gcc-SAN/maxent/src/maxent.h:262
    #5 0x7f0d091cf657 in ME_Model::add_training_sample(ME_Sample const&) /data/gannet/ripley/R/packages/tests-gcc-SAN/maxent/src/maxent.cpp:280
    #6 0x7f0d092e8bd5 in add_samples(int, int, std::vector, std::allocator >, std::allocator, std::allocator > > >, std::vector >, std::vector, std::allocator >, std::allocator, std::allocator > > >, std::vector >) /data/gannet/ripley/R/packages/tests-gcc-SAN/maxent/src/rmaxent.cpp:37
    #7 0x7f0d0936f7aa in Rcpp::CppFunction6, std::allocator >, std::allocator, std::allocator > > >, std::vector >, std::vector, std::allocator >, std::allocator, std::allocator > > >, std::vector > >::operator()(SEXPREC**) /data/gannet/ripley/R/test-3.6/Rcpp/include/Rcpp/module/Module_generated_CppFunction.h:646
    #8 0x7f0d0a926760 in InternalFunction_invoke(SEXPREC*) /tmp/RtmpdlSSFU/R.INSTALL18f02cf1d20b/Rcpp/src/Module.cpp:122
    #9 0x55b0d6 in do_External /data/gannet/ripley/R/svn/R-devel/src/main/dotcode.c:548
    #10 0x646c20 in Rf_eval /data/gannet/ripley/R/svn/R-devel/src/main/eval.c:723
    #11 0x64f246 in do_begin /data/gannet/ripley/R/svn/R-devel/src/main/eval.c:2382
    #12 0x646649 in Rf_eval /data/gannet/ripley/R/svn/R-devel/src/main/eval.c:695
    #13 0x64b1c5 in R_execClosure /data/gannet/ripley/R/svn/R-devel/src/main/eval.c:1780
    #14 0x64d7b8 in Rf_applyClosure /data/gannet/ripley/R/svn/R-devel/src/main/eval.c:1706
    #15 0x62ccab in bcEval /data/gannet/ripley/R/svn/R-devel/src/main/eval.c:6739
    #16 0x645abf in Rf_eval /data/gannet/ripley/R/svn/R-devel/src/main/eval.c:620
    #17 0x64b1c5 in R_execClosure /data/gannet/ripley/R/svn/R-devel/src/main/eval.c:1780
    #18 0x64d7b8 in Rf_applyClosure /data/gannet/ripley/R/svn/R-devel/src/main/eval.c:1706
    #19 0x62ccab in bcEval /data/gannet/ripley/R/svn/R-devel/src/main/eval.c:6739
    #20 0x645abf in Rf_eval /data/gannet/ripley/R/svn/R-devel/src/main/eval.c:620
    #21 0x64b1c5 in R_execClosure /data/gannet/ripley/R/svn/R-devel/src/main/eval.c:1780
    #22 0x64d7b8 in Rf_applyClosure /data/gannet/ripley/R/svn/R-devel/src/main/eval.c:1706
    #23 0x645fc3 in Rf_eval /data/gannet/ripley/R/svn/R-devel/src/main/eval.c:743
    #24 0x652511 in do_set /data/gannet/ripley/R/svn/R-devel/src/main/eval.c:2807
    #25 0x646649 in Rf_eval /data/gannet/ripley/R/svn/R-devel/src/main/eval.c:695
    #26 0x6bdb6e in Rf_ReplIteration /data/gannet/ripley/R/svn/R-devel/src/main/main.c:260
    #27 0x6bdb6e in Rf_ReplIteration /data/gannet/ripley/R/svn/R-devel/src/main/main.c:200
    #28 0x6be220 in R_ReplConsole /data/gannet/ripley/R/svn/R-devel/src/main/main.c:310
    #29 0x6be354 in run_Rmainloop /data/gannet/ripley/R/svn/R-devel/src/main/main.c:1086
    #30 0x4180e8 in main /data/gannet/ripley/R/svn/R-devel/src/main/Rmain.c:29
    #31 0x7f0d1be2411a in __libc_start_main (/lib64/libc.so.6+0x2311a)
    #32 0x41a819 in _start (/data/gannet/ripley/R/gcc-SAN/bin/exec/R+0x41a819)
> new <- predict(m, x)
> 
> # VISUALIZE
> plot(x, y)
> points(x, log(x), col = 2)
> points(x, new[,1], col = 4)
> 
> 
> 
> cleanEx()
> nameEx("predict.maxent")
> ### * predict.maxent
> 
> flush(stderr()); flush(stdout())
> 
> ### Name: predict.maxent
> ### Title: predicts the expected label of a document given a trained model.
> ### Aliases: predict.maxent
> ### Keywords: methods
> 
> ### ** Examples
> 
> # LOAD LIBRARY
> library(maxent)
> 
> # READ THE DATA, PREPARE THE CORPUS, and CREATE THE MATRIX
> data <- read.csv(system.file("data/NYTimes.csv.gz",package="maxent"))
> corpus <- Corpus(VectorSource(data$Title[1:150]))
> matrix <- DocumentTermMatrix(corpus)
> 
> # TRAIN/PREDICT USING SPARSEM REPRESENTATION
> sparse <- as.compressed.matrix(matrix)
> model <- maxent(sparse[1:100,],as.factor(data$Topic.Code)[1:100])
> results <- predict(model,sparse[101:150,])
> 
> 
> 
> cleanEx()
> nameEx("save.model")
> ### * save.model
> 
> flush(stderr()); flush(stdout())
> 
> ### Name: save.model
> ### Title: saves a maximum entropy model to a file.
> ### Aliases: save.model
> ### Keywords: methods
> 
> ### ** Examples
> 
> # LOAD LIBRARY
> library(maxent)
> 
> # READ THE DATA, PREPARE THE CORPUS, and CREATE THE MATRIX
> data <- read.csv(system.file("data/NYTimes.csv.gz",package="maxent"))
> corpus <- Corpus(VectorSource(data$Title[1:150]))
> matrix <- DocumentTermMatrix(corpus)
> 
> # TRAIN USING SPARSEM REPRESENTATION
> sparse <- as.compressed.matrix(matrix)
> model <- maxent(sparse[1:100,],as.factor(data$Topic.Code)[1:100])
> save.model(model,"myModel")
> 
> # TRAIN USING REGULAR MATRIX REPRESENTATION
> model <- maxent(as.matrix(matrix)[1:100,],as.factor(data$Topic.Code)[1:100])
> save.model(model,"myModel")
> 
> 
> 
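## Sketch (not executed in this session): a round-trip check for save.model()
## and load.model(). A reloaded model should reproduce the in-memory model's
## predictions. predict() is assumed to accept the dense matrix rows here,
## just as maxent() did above; file.remove() only deletes the "myModel" file
## written by save.model().
saved_model <- load.model("myModel")
identical(predict(model, as.matrix(matrix)[101:150,]),
          predict(saved_model, as.matrix(matrix)[101:150,]))
file.remove("myModel")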
> cleanEx()
> nameEx("tune.maxent")
> ### * tune.maxent
> 
> flush(stderr()); flush(stdout())
> 
> ### Name: tune.maxent
> ### Title: fits a maximum entropy model given a training matrix and a
> ###   vector or factor of labels.
> ### Aliases: tune.maxent
> ### Keywords: methods
> 
> ### ** Examples
> 
> # LOAD LIBRARY
> library(maxent)
> 
> # A DIFFERENT EXAMPLE
> data(iris)
> attach(iris)
> 
> x <- subset(iris, select = -Species)
> y <- Species
> 
> f <- tune.maxent(x,y,nfold=3,showall=TRUE)
> 
> 
> 
> ### *
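## Sketch (not executed in this session): with showall=TRUE, tune.maxent() is
## expected to report one row per parameter configuration together with its
## estimated accuracy. The column names used below ("accuracy",
## "l1_regularizer", "l2_regularizer", "use_sgd", "set_heldout") and the refit
## with the best-scoring row are assumptions about the package interface, not
## output recorded in this log.
best <- f[which.max(f[, "accuracy"]), ]
tuned_model <- maxent(x, y,
                      l1_regularizer = best["l1_regularizer"],
                      l2_regularizer = best["l2_regularizer"],
                      use_sgd        = as.logical(best["use_sgd"]),
                      set_heldout    = best["set_heldout"])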