[Bookmarked] SearchCrawler, by James Holmes
Reposted from Crawling the Web with Java, by James Holmes.
No external libraries are required; the listing compiles and runs on a standard JDK as-is.
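To try it, save the listing as SearchCrawler.java and build it with a standard JDK (the file name must match the public class name):

  javac SearchCrawler.java
  java SearchCrawler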
import java.awt.*;
import java.awt.event.*;
import java.io.*;
import java.net.*;
import java.util.*;
import java.util.regex.*;
import javax.swing.*;
import javax.swing.table.*;

// The Search Web Crawler
public class SearchCrawler extends JFrame
{
  // Max URLs drop-down values.
  private static final String[] MAX_URLS =
    {"50", "100", "500", "1000"};

  // Cache of robot disallow lists.
  private HashMap disallowListCache = new HashMap();

  // Search GUI controls.
  private JTextField startTextField;
  private JComboBox maxComboBox;
  private JCheckBox limitCheckBox;
  private JTextField logTextField;
  private JTextField searchTextField;
  private JCheckBox caseCheckBox;
  private JButton searchButton;

  // Search stats GUI controls.
  private JLabel crawlingLabel2;
  private JLabel crawledLabel2;
  private JLabel toCrawlLabel2;
  private JProgressBar progressBar;
  private JLabel matchesLabel2;

  // Table listing search matches.
  private JTable table;

  // Flag for whether or not crawling is underway.
  private boolean crawling;

  // Matches log file print writer.
  private PrintWriter logFileWriter;
  // Constructor for Search Web Crawler.
  public SearchCrawler()
  {
    // Set application title.
    setTitle("Search Crawler");

    // Set window size.
    setSize(600, 600);

    // Handle window closing events.
    addWindowListener(new WindowAdapter() {
      public void windowClosing(WindowEvent e) {
        actionExit();
      }
    });

    // Set up File menu.
    JMenuBar menuBar = new JMenuBar();
    JMenu fileMenu = new JMenu("File");
    fileMenu.setMnemonic(KeyEvent.VK_F);
    JMenuItem fileExitMenuItem = new JMenuItem("Exit",
      KeyEvent.VK_X);
    fileExitMenuItem.addActionListener(new ActionListener() {
      public void actionPerformed(ActionEvent e) {
        actionExit();
      }
    });
    fileMenu.add(fileExitMenuItem);
    menuBar.add(fileMenu);
    setJMenuBar(menuBar);
    // Set up search panel.
    JPanel searchPanel = new JPanel();
    GridBagConstraints constraints;
    GridBagLayout layout = new GridBagLayout();
    searchPanel.setLayout(layout);

    JLabel startLabel = new JLabel("Start URL:");
    constraints = new GridBagConstraints();
    constraints.anchor = GridBagConstraints.EAST;
    constraints.insets = new Insets(5, 5, 0, 0);
    layout.setConstraints(startLabel, constraints);
    searchPanel.add(startLabel);

    startTextField = new JTextField();
    constraints = new GridBagConstraints();
    constraints.fill = GridBagConstraints.HORIZONTAL;
    constraints.gridwidth = GridBagConstraints.REMAINDER;
    constraints.insets = new Insets(5, 5, 0, 5);
    layout.setConstraints(startTextField, constraints);
    searchPanel.add(startTextField);

    JLabel maxLabel = new JLabel("Max URLs to Crawl:");
    constraints = new GridBagConstraints();
    constraints.anchor = GridBagConstraints.EAST;
    constraints.insets = new Insets(5, 5, 0, 0);
    layout.setConstraints(maxLabel, constraints);
    searchPanel.add(maxLabel);

    maxComboBox = new JComboBox(MAX_URLS);
    maxComboBox.setEditable(true);
    constraints = new GridBagConstraints();
    constraints.insets = new Insets(5, 5, 0, 0);
    layout.setConstraints(maxComboBox, constraints);
    searchPanel.add(maxComboBox);

    limitCheckBox =
      new JCheckBox("Limit crawling to Start URL site");
    constraints = new GridBagConstraints();
    constraints.anchor = GridBagConstraints.WEST;
    constraints.insets = new Insets(0, 10, 0, 0);
    layout.setConstraints(limitCheckBox, constraints);
    searchPanel.add(limitCheckBox);

    JLabel blankLabel = new JLabel();
    constraints = new GridBagConstraints();
    constraints.gridwidth = GridBagConstraints.REMAINDER;
    layout.setConstraints(blankLabel, constraints);
    searchPanel.add(blankLabel);

    JLabel logLabel = new JLabel("Matches Log File:");
    constraints = new GridBagConstraints();
    constraints.anchor = GridBagConstraints.EAST;
    constraints.insets = new Insets(5, 5, 0, 0);
    layout.setConstraints(logLabel, constraints);
    searchPanel.add(logLabel);

    String file =
      System.getProperty("user.dir") +
      System.getProperty("file.separator") +
      "crawler.log";
    logTextField = new JTextField(file);
    constraints = new GridBagConstraints();
    constraints.fill = GridBagConstraints.HORIZONTAL;
    constraints.gridwidth = GridBagConstraints.REMAINDER;
    constraints.insets = new Insets(5, 5, 0, 5);
    layout.setConstraints(logTextField, constraints);
    searchPanel.add(logTextField);

    JLabel searchLabel = new JLabel("Search String:");
    constraints = new GridBagConstraints();
    constraints.anchor = GridBagConstraints.EAST;
    constraints.insets = new Insets(5, 5, 0, 0);
    layout.setConstraints(searchLabel, constraints);
    searchPanel.add(searchLabel);

    searchTextField = new JTextField();
    constraints = new GridBagConstraints();
    constraints.fill = GridBagConstraints.HORIZONTAL;
    constraints.insets = new Insets(5, 5, 0, 0);
    constraints.gridwidth = 2;
    constraints.weightx = 1.0d;
    layout.setConstraints(searchTextField, constraints);
    searchPanel.add(searchTextField);

    caseCheckBox = new JCheckBox("Case Sensitive");
    constraints = new GridBagConstraints();
    constraints.insets = new Insets(5, 5, 0, 5);
    constraints.gridwidth = GridBagConstraints.REMAINDER;
    layout.setConstraints(caseCheckBox, constraints);
    searchPanel.add(caseCheckBox);

    searchButton = new JButton("Search");
    searchButton.addActionListener(new ActionListener() {
      public void actionPerformed(ActionEvent e) {
        actionSearch();
      }
    });
    constraints = new GridBagConstraints();
    constraints.gridwidth = GridBagConstraints.REMAINDER;
    constraints.insets = new Insets(5, 5, 5, 5);
    layout.setConstraints(searchButton, constraints);
    searchPanel.add(searchButton);

    JSeparator separator = new JSeparator();
    constraints = new GridBagConstraints();
    constraints.fill = GridBagConstraints.HORIZONTAL;
    constraints.gridwidth = GridBagConstraints.REMAINDER;
    constraints.insets = new Insets(5, 5, 5, 5);
    layout.setConstraints(separator, constraints);
    searchPanel.add(separator);
    JLabel crawlingLabel1 = new JLabel("Crawling:");
    constraints = new GridBagConstraints();
    constraints.anchor = GridBagConstraints.EAST;
    constraints.insets = new Insets(5, 5, 0, 0);
    layout.setConstraints(crawlingLabel1, constraints);
    searchPanel.add(crawlingLabel1);

    crawlingLabel2 = new JLabel();
    crawlingLabel2.setFont(
      crawlingLabel2.getFont().deriveFont(Font.PLAIN));
    constraints = new GridBagConstraints();
    constraints.fill = GridBagConstraints.HORIZONTAL;
    constraints.gridwidth = GridBagConstraints.REMAINDER;
    constraints.insets = new Insets(5, 5, 0, 5);
    layout.setConstraints(crawlingLabel2, constraints);
    searchPanel.add(crawlingLabel2);

    JLabel crawledLabel1 = new JLabel("Crawled URLs:");
    constraints = new GridBagConstraints();
    constraints.anchor = GridBagConstraints.EAST;
    constraints.insets = new Insets(5, 5, 0, 0);
    layout.setConstraints(crawledLabel1, constraints);
    searchPanel.add(crawledLabel1);

    crawledLabel2 = new JLabel();
    crawledLabel2.setFont(
      crawledLabel2.getFont().deriveFont(Font.PLAIN));
    constraints = new GridBagConstraints();
    constraints.fill = GridBagConstraints.HORIZONTAL;
    constraints.gridwidth = GridBagConstraints.REMAINDER;
    constraints.insets = new Insets(5, 5, 0, 5);
    layout.setConstraints(crawledLabel2, constraints);
    searchPanel.add(crawledLabel2);

    JLabel toCrawlLabel1 = new JLabel("URLs to Crawl:");
    constraints = new GridBagConstraints();
    constraints.anchor = GridBagConstraints.EAST;
    constraints.insets = new Insets(5, 5, 0, 0);
    layout.setConstraints(toCrawlLabel1, constraints);
    searchPanel.add(toCrawlLabel1);

    toCrawlLabel2 = new JLabel();
    toCrawlLabel2.setFont(
      toCrawlLabel2.getFont().deriveFont(Font.PLAIN));
    constraints = new GridBagConstraints();
    constraints.fill = GridBagConstraints.HORIZONTAL;
    constraints.gridwidth = GridBagConstraints.REMAINDER;
    constraints.insets = new Insets(5, 5, 0, 5);
    layout.setConstraints(toCrawlLabel2, constraints);
    searchPanel.add(toCrawlLabel2);

    JLabel progressLabel = new JLabel("Crawling Progress:");
    constraints = new GridBagConstraints();
    constraints.anchor = GridBagConstraints.EAST;
    constraints.insets = new Insets(5, 5, 0, 0);
    layout.setConstraints(progressLabel, constraints);
    searchPanel.add(progressLabel);

    progressBar = new JProgressBar();
    progressBar.setMinimum(0);
    progressBar.setStringPainted(true);
    constraints = new GridBagConstraints();
    constraints.fill = GridBagConstraints.HORIZONTAL;
    constraints.gridwidth = GridBagConstraints.REMAINDER;
    constraints.insets = new Insets(5, 5, 0, 5);
    layout.setConstraints(progressBar, constraints);
    searchPanel.add(progressBar);

    JLabel matchesLabel1 = new JLabel("Search Matches:");
    constraints = new GridBagConstraints();
    constraints.anchor = GridBagConstraints.EAST;
    constraints.insets = new Insets(5, 5, 10, 0);
    layout.setConstraints(matchesLabel1, constraints);
    searchPanel.add(matchesLabel1);

    matchesLabel2 = new JLabel();
    matchesLabel2.setFont(
      matchesLabel2.getFont().deriveFont(Font.PLAIN));
    constraints = new GridBagConstraints();
    constraints.fill = GridBagConstraints.HORIZONTAL;
    constraints.gridwidth = GridBagConstraints.REMAINDER;
    constraints.insets = new Insets(5, 5, 10, 5);
    layout.setConstraints(matchesLabel2, constraints);
    searchPanel.add(matchesLabel2);
    // Set up matches table.
    table =
      new JTable(new DefaultTableModel(new Object[][]{},
        new String[]{"URL"}) {
        public boolean isCellEditable(int row, int column)
        {
          return false;
        }
      });

    // Set up Matches panel.
    JPanel matchesPanel = new JPanel();
    matchesPanel.setBorder(
      BorderFactory.createTitledBorder("Matches"));
    matchesPanel.setLayout(new BorderLayout());
    matchesPanel.add(new JScrollPane(table),
      BorderLayout.CENTER);

    // Add panels to display.
    getContentPane().setLayout(new BorderLayout());
    getContentPane().add(searchPanel, BorderLayout.NORTH);
    getContentPane().add(matchesPanel, BorderLayout.CENTER);
  }
  // Exit this program.
  private void actionExit() {
    System.exit(0);
  }
  // Handle Search/Stop button being clicked.
  private void actionSearch() {
    // If stop button clicked, turn crawling flag off.
    if (crawling) {
      crawling = false;
      return;
    }

    ArrayList errorList = new ArrayList();

    // Validate that start URL has been entered.
    String startUrl = startTextField.getText().trim();
    if (startUrl.length() < 1) {
      errorList.add("Missing Start URL.");
    }
    // Verify start URL.
    else if (verifyUrl(startUrl) == null) {
      errorList.add("Invalid Start URL.");
    }

    /* Validate that Max URLs is either empty or is a number.
       -1 is the "no limit" sentinel that crawl() checks for,
       so it must be the default when the field is left empty. */
    int maxUrls = -1;
    String max = ((String) maxComboBox.getSelectedItem()).trim();
    if (max.length() > 0) {
      try {
        maxUrls = Integer.parseInt(max);
      } catch (NumberFormatException e) {
        // Leave maxUrls at -1 so the range check below reports an error.
      }
      if (maxUrls < 1) {
        errorList.add("Invalid Max URLs value.");
      }
    }

    // Validate that matches log file has been entered.
    String logFile = logTextField.getText().trim();
    if (logFile.length() < 1) {
      errorList.add("Missing Matches Log File.");
    }

    // Validate that search string has been entered.
    String searchString = searchTextField.getText().trim();
    if (searchString.length() < 1) {
      errorList.add("Missing Search String.");
    }

    // Show errors, if any, and return.
    if (errorList.size() > 0) {
      StringBuffer message = new StringBuffer();

      // Concatenate errors into single message.
      for (int i = 0; i < errorList.size(); i++) {
        message.append(errorList.get(i));
        if (i + 1 < errorList.size()) {
          message.append("\n");
        }
      }

      showError(message.toString());
      return;
    }

    // Remove "www" from start URL if present.
    startUrl = removeWwwFromUrl(startUrl);

    // Start the Search Crawler.
    search(logFile, startUrl, maxUrls, searchString);
  }
  private void search(final String logFile, final String startUrl,
    final int maxUrls, final String searchString)
  {
    // Start the search in a new thread.
    Thread thread = new Thread(new Runnable() {
      public void run() {
        // Show hour glass cursor while crawling is under way.
        setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR));

        // Disable search controls.
        startTextField.setEnabled(false);
        maxComboBox.setEnabled(false);
        limitCheckBox.setEnabled(false);
        logTextField.setEnabled(false);
        searchTextField.setEnabled(false);
        caseCheckBox.setEnabled(false);

        // Switch Search button to "Stop."
        searchButton.setText("Stop");

        // Reset stats.
        table.setModel(new DefaultTableModel(new Object[][]{},
          new String[]{"URL"}) {
          public boolean isCellEditable(int row, int column)
          {
            return false;
          }
        });
        updateStats(startUrl, 0, 0, maxUrls);

        // Open matches log file.
        try {
          logFileWriter = new PrintWriter(new FileWriter(logFile));
        } catch (Exception e) {
          showError("Unable to open matches log file.");
          return;
        }

        // Turn crawling flag on.
        crawling = true;

        // Perform the actual crawling.
        crawl(startUrl, maxUrls, limitCheckBox.isSelected(),
          searchString, caseCheckBox.isSelected());

        // Turn crawling flag off.
        crawling = false;

        // Close matches log file.
        try {
          logFileWriter.close();
        } catch (Exception e) {
          showError("Unable to close matches log file.");
        }

        // Mark search as done.
        crawlingLabel2.setText("Done");

        // Enable search controls.
        startTextField.setEnabled(true);
        maxComboBox.setEnabled(true);
        limitCheckBox.setEnabled(true);
        logTextField.setEnabled(true);
        searchTextField.setEnabled(true);
        caseCheckBox.setEnabled(true);

        // Switch search button back to "Search."
        searchButton.setText("Search");

        // Return to default cursor.
        setCursor(Cursor.getDefaultCursor());

        // Show message if search string not found.
        if (table.getRowCount() == 0) {
          JOptionPane.showMessageDialog(SearchCrawler.this,
            "Your Search String was not found. Please try another.",
            "Search String Not Found",
            JOptionPane.WARNING_MESSAGE);
        }
      }
    });
    thread.start();
  }
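A caveat on threading: the worker thread above mutates Swing components (labels, the table model, the button text) directly. Swing's single-thread rule says such updates belong on the Event Dispatch Thread; this usually goes unnoticed in a small tool like this, but the safe pattern is to marshal each update through SwingUtilities.invokeLater. A minimal sketch of the idea, shown for the "Done" update inside run():

        // Sketch: push the UI update onto the Event Dispatch Thread
        // instead of touching the label from the worker thread.
        SwingUtilities.invokeLater(new Runnable() {
          public void run() {
            crawlingLabel2.setText("Done");
          }
        });

The same wrapping would apply to the setEnabled calls, searchButton.setText, and updateStats.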
  // Show dialog box with error message.
  private void showError(String message) {
    JOptionPane.showMessageDialog(this, message, "Error",
      JOptionPane.ERROR_MESSAGE);
  }

  // Update crawling stats.
  private void updateStats(
    String crawling, int crawled, int toCrawl, int maxUrls)
  {
    crawlingLabel2.setText(crawling);
    crawledLabel2.setText("" + crawled);
    toCrawlLabel2.setText("" + toCrawl);

    // Update progress bar.
    if (maxUrls == -1) {
      progressBar.setMaximum(crawled + toCrawl);
    } else {
      progressBar.setMaximum(maxUrls);
    }
    progressBar.setValue(crawled);

    matchesLabel2.setText("" + table.getRowCount());
  }
  // Add match to matches table and log file.
  private void addMatch(String url) {
    // Add URL to matches table.
    DefaultTableModel model =
      (DefaultTableModel) table.getModel();
    model.addRow(new Object[]{url});

    // Add URL to matches log file.
    try {
      logFileWriter.println(url);
    } catch (Exception e) {
      showError("Unable to log match.");
    }
  }
  // Verify URL format.
  private URL verifyUrl(String url) {
    // Only allow HTTP URLs.
    if (!url.toLowerCase().startsWith("http://"))
      return null;

    // Verify format of URL.
    URL verifiedUrl = null;
    try {
      verifiedUrl = new URL(url);
    } catch (Exception e) {
      return null;
    }

    return verifiedUrl;
  }
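Note that verifyUrl() accepts only http:// URLs, so https:// links are silently dropped, which is a real limitation on today's mostly-HTTPS web. Illustrative behavior (hypothetical inputs, written as calls from inside the class since the method is private):

  verifyUrl("http://example.com/page.html")  // returns a java.net.URL
  verifyUrl("https://example.com/")          // null: only http:// passes the scheme check
  verifyUrl("ftp://example.com/readme")      // null: same reason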
  // Check if robot is allowed to access the given URL.
  private boolean isRobotAllowed(URL urlToCheck) {
    String host = urlToCheck.getHost().toLowerCase();

    // Retrieve host's disallow list from cache.
    ArrayList disallowList =
      (ArrayList) disallowListCache.get(host);

    // If list is not in the cache, download and cache it.
    if (disallowList == null) {
      disallowList = new ArrayList();

      try {
        URL robotsFileUrl =
          new URL("http://" + host + "/robots.txt");

        // Open connection to robot file URL for reading.
        BufferedReader reader =
          new BufferedReader(new InputStreamReader(
            robotsFileUrl.openStream()));

        // Read robot file, creating list of disallowed paths.
        String line;
        while ((line = reader.readLine()) != null) {
          if (line.indexOf("Disallow:") == 0) {
            String disallowPath =
              line.substring("Disallow:".length());

            // Check disallow path for comments and remove if present.
            int commentIndex = disallowPath.indexOf("#");
            if (commentIndex != -1) {
              disallowPath =
                disallowPath.substring(0, commentIndex);
            }

            // Remove leading or trailing spaces from disallow path.
            disallowPath = disallowPath.trim();

            /* An empty Disallow: value means "allow everything";
               skip it, since every path starts with "" and it would
               otherwise block the entire host. */
            if (disallowPath.length() > 0) {
              // Add disallow path to list.
              disallowList.add(disallowPath);
            }
          }
        }

        // Add new disallow list to cache.
        disallowListCache.put(host, disallowList);
      }
      catch (Exception e) {
        /* Assume robot is allowed since an exception
           is thrown if the robot file doesn't exist. */
        return true;
      }
    }

    /* Loop through disallow list to see if
       crawling is allowed for the given URL. */
    String file = urlToCheck.getFile();
    for (int i = 0; i < disallowList.size(); i++) {
      String disallow = (String) disallowList.get(i);
      if (file.startsWith(disallow)) {
        return false;
      }
    }

    return true;
  }
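To make the robots.txt handling concrete: given a hypothetical file like the one below, the method caches the list ["/cgi-bin/", "/tmp/"] for the host and then refuses any URL whose path starts with either prefix.

  User-agent: *
  Disallow: /cgi-bin/   # no scripts
  Disallow: /tmp/

Note that this is a simplification of the robots exclusion standard: User-agent sections are ignored entirely, so Disallow lines aimed at other crawlers are applied too, and Allow lines are not supported. The guard above also skips an empty "Disallow:" value, which by the standard means "allow everything" but would otherwise match every path as a prefix.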
  // Download page at given URL.
  private String downloadPage(URL pageUrl) {
    try {
      // Open connection to URL for reading.
      BufferedReader reader =
        new BufferedReader(new InputStreamReader(
          pageUrl.openStream()));

      // Read page into buffer.
      String line;
      StringBuffer pageBuffer = new StringBuffer();
      while ((line = reader.readLine()) != null) {
        pageBuffer.append(line);
      }

      return pageBuffer.toString();
    } catch (Exception e) {
      // Treat any I/O failure as an unretrievable page.
    }

    return null;
  }
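One subtlety in downloadPage(): readLine() strips line terminators and the loop appends lines with no separator, so tokens separated only by a newline get fused (for example "<a" at the end of one line and "href" at the start of the next), which can hide links from retrieveLinks() and create spurious search matches. A newline-preserving variant is a one-line change, as a sketch:

      while ((line = reader.readLine()) != null) {
        pageBuffer.append(line);
        pageBuffer.append('\n'); // Keep original line boundaries.
      }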
  // Remove leading "www" from a URL's host if present.
  private String removeWwwFromUrl(String url) {
    int index = url.indexOf("://www.");
    if (index != -1) {
      return url.substring(0, index + 3) +
        url.substring(index + 7);
    }

    return (url);
  }
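Two examples of the rewrite (hypothetical URLs):

  removeWwwFromUrl("http://www.example.com/a.html")  // "http://example.com/a.html"
  removeWwwFromUrl("http://example.com/a.html")      // returned unchanged

The arithmetic works because "://" is 3 characters and "://www." is 7, so substring(0, index + 3) keeps everything through the scheme separator and substring(index + 7) drops the "www.".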
  // Parse through page contents and retrieve links.
  private ArrayList retrieveLinks(
    URL pageUrl, String pageContents, HashSet crawledList,
    boolean limitHost)
  {
    // Compile link matching pattern.
    Pattern p =
      Pattern.compile("<a\\s+href\\s*=\\s*\"?(.*?)[\"|>]",
        Pattern.CASE_INSENSITIVE);
    Matcher m = p.matcher(pageContents);

    // Create list of link matches.
    ArrayList linkList = new ArrayList();
    while (m.find()) {
      String link = m.group(1).trim();

      // Skip empty links.
      if (link.length() < 1) {
        continue;
      }

      // Skip links that are just page anchors.
      if (link.charAt(0) == '#') {
        continue;
      }

      // Skip mailto links.
      if (link.indexOf("mailto:") != -1) {
        continue;
      }

      // Skip JavaScript links.
      if (link.toLowerCase().indexOf("javascript") != -1) {
        continue;
      }

      // Prefix absolute and relative URLs if necessary.
      if (link.indexOf("://") == -1) {
        // Handle absolute URLs.
        if (link.charAt(0) == '/') {
          link = "http://" + pageUrl.getHost() + link;
        // Handle relative URLs.
        } else {
          String file = pageUrl.getFile();
          if (file.indexOf('/') == -1) {
            link = "http://" + pageUrl.getHost() + "/" + link;
          } else {
            String path =
              file.substring(0, file.lastIndexOf('/') + 1);
            link = "http://" + pageUrl.getHost() + path + link;
          }
        }
      }

      // Remove anchors from link.
      int index = link.indexOf('#');
      if (index != -1) {
        link = link.substring(0, index);
      }

      // Remove leading "www" from URL's host if present.
      link = removeWwwFromUrl(link);

      // Verify link and skip if invalid.
      URL verifiedLink = verifyUrl(link);
      if (verifiedLink == null) {
        continue;
      }

      /* If specified, limit links to those
         having the same host as the start URL. */
      if (limitHost &&
        !pageUrl.getHost().toLowerCase().equals(
          verifiedLink.getHost().toLowerCase()))
      {
        continue;
      }

      // Skip link if it has already been crawled.
      if (crawledList.contains(link)) {
        continue;
      }

      // Add link to list.
      linkList.add(link);
    }

    return (linkList);
  }
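A note on the link pattern: inside a character class | is a literal, so ["|>] ends the capture at a double quote, a pipe, or a >. That is adequate for typical pages, but it misses single-quoted hrefs (href='...') and truncates the rare URL containing a literal |. A slightly more tolerant pattern, as a sketch (still nowhere near a real HTML parser):

    Pattern p = Pattern.compile(
      "<a\\s+href\\s*=\\s*[\"']?([^\"'>\\s]+)",
      Pattern.CASE_INSENSITIVE);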
  /* Determine whether or not search string is
     matched in the given page contents. */
  private boolean searchStringMatches(
    String pageContents, String searchString,
    boolean caseSensitive)
  {
    String searchContents = pageContents;

    /* If case-insensitive search, lowercase
       page contents for comparison. */
    if (!caseSensitive) {
      searchContents = pageContents.toLowerCase();
    }

    // Split search string into individual terms.
    Pattern p = Pattern.compile("[\\s]+");
    String[] terms = p.split(searchString);

    // Check to see if each term matches.
    for (int i = 0; i < terms.length; i++) {
      if (caseSensitive) {
        if (searchContents.indexOf(terms[i]) == -1) {
          return false;
        }
      } else {
        if (searchContents.indexOf(terms[i].toLowerCase()) == -1) {
          return false;
        }
      }
    }

    return true;
  }
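In effect the search string is an implicit AND over whitespace-separated terms, not a phrase match: every term must occur somewhere in the page, in any order. For example, with case sensitivity off:

  searchStringMatches("Java is a Crawler toolkit", "java crawler", false)  // true
  searchStringMatches("Java only", "java crawler", false)                  // false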
  // Perform the actual crawling, searching for the search string.
  public void crawl(
    String startUrl, int maxUrls, boolean limitHost,
    String searchString, boolean caseSensitive)
  {
    // Set up crawl lists.
    HashSet crawledList = new HashSet();
    LinkedHashSet toCrawlList = new LinkedHashSet();

    // Add start URL to the to crawl list.
    toCrawlList.add(startUrl);

    /* Perform actual crawling by looping
       through the To Crawl list. */
    while (crawling && toCrawlList.size() > 0)
    {
      /* Check to see if the max URL count has
         been reached, if it was specified. */
      if (maxUrls != -1) {
        if (crawledList.size() == maxUrls) {
          break;
        }
      }

      /* Get URL at the head of the To Crawl list; LinkedHashSet
         preserves insertion order, so this is a breadth-first crawl. */
      String url = (String) toCrawlList.iterator().next();

      // Remove URL from the To Crawl list.
      toCrawlList.remove(url);

      // Convert string url to URL object.
      URL verifiedUrl = verifyUrl(url);

      // Skip URL if robots are not allowed to access it.
      if (!isRobotAllowed(verifiedUrl)) {
        continue;
      }

      // Update crawling stats.
      updateStats(url, crawledList.size(), toCrawlList.size(),
        maxUrls);

      // Add page to the crawled list.
      crawledList.add(url);

      // Download the page at the given URL.
      String pageContents = downloadPage(verifiedUrl);

      /* If the page was downloaded successfully, retrieve all its
         links and then see if it contains the search string. */
      if (pageContents != null && pageContents.length() > 0)
      {
        // Retrieve list of valid links from page.
        ArrayList links =
          retrieveLinks(verifiedUrl, pageContents, crawledList,
            limitHost);

        // Add links to the To Crawl list.
        toCrawlList.addAll(links);

        /* Check if search string is present in
           page, and if so, record a match. */
        if (searchStringMatches(pageContents, searchString,
          caseSensitive))
        {
          addMatch(url);
        }
      }

      // Update crawling stats.
      updateStats(url, crawledList.size(), toCrawlList.size(),
        maxUrls);
    }
  }
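An operational caveat: the loop issues HTTP requests back-to-back, which is impolite to small servers. A minimal politeness tweak is to pause at the bottom of the loop body; the 500 ms figure below is an arbitrary assumption, not part of the original program:

      // Sketch: pause between requests to avoid hammering a host.
      try {
        Thread.sleep(500);
      } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        break; // Stop crawling promptly if interrupted.
      }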
  // Run the Search Crawler.
  public static void main(String[] args) {
    SearchCrawler crawler = new SearchCrawler();
    crawler.setVisible(true); // show() is deprecated; setVisible(true) is the equivalent.
  }
}
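Strictly speaking, Swing frames should also be constructed on the Event Dispatch Thread. A safer main(), as a sketch in the same pre-generics style:

  // Sketch: construct and show the frame on the Event Dispatch Thread.
  public static void main(String[] args) {
    SwingUtilities.invokeLater(new Runnable() {
      public void run() {
        new SearchCrawler().setVisible(true);
      }
    });
  }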